diff --git a/.coveragerc36 b/.coveragerc36 deleted file mode 100644 index 8642882ab1..0000000000 --- a/.coveragerc36 +++ /dev/null @@ -1,14 +0,0 @@ -# This is the coverage.py config for Python 3.6 -# The config for newer Python versions is in pyproject.toml. - -[run] -branch = true -omit = - /tmp/* - */tests/* - */.venv/* - - -[report] -exclude_lines = - if TYPE_CHECKING: diff --git a/.github/workflows/test-integrations-ai.yml b/.github/workflows/test-integrations-ai.yml index dd57f5909b..e1a84cc240 100644 --- a/.github/workflows/test-integrations-ai.yml +++ b/.github/workflows/test-integrations-ai.yml @@ -30,17 +30,10 @@ jobs: fail-fast: false matrix: python-version: ["3.9","3.11","3.12"] - # python3.6 reached EOL and is no longer being supported on - # new versions of hosted runners on Github Actions - # ubuntu-20.04 is the last version that supported python3.6 - # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877 os: [ubuntu-22.04] - # Use Docker container only for Python 3.6 - container: ${{ matrix.python-version == '3.6' && 'python:3.6' || null }} steps: - uses: actions/checkout@v4.2.2 - uses: actions/setup-python@v5 - if: ${{ matrix.python-version != '3.6' }} with: python-version: ${{ matrix.python-version }} allow-prereleases: true @@ -74,14 +67,8 @@ jobs: run: | set -x # print commands that are executed ./scripts/runtox.sh "py${{ matrix.python-version }}-huggingface_hub-latest" - - name: Generate coverage XML (Python 3.6) - if: ${{ !cancelled() && matrix.python-version == '3.6' }} - run: | - export COVERAGE_RCFILE=.coveragerc36 - coverage combine .coverage-sentry-* - coverage xml --ignore-errors - name: Generate coverage XML - if: ${{ !cancelled() && matrix.python-version != '3.6' }} + if: ${{ !cancelled() }} run: | coverage combine .coverage-sentry-* coverage xml @@ -109,17 +96,10 @@ jobs: fail-fast: false matrix: python-version: ["3.8","3.9","3.10","3.11","3.12","3.13"] - # python3.6 reached EOL and is no longer being supported on - # new versions of hosted runners on Github Actions - # ubuntu-20.04 is the last version that supported python3.6 - # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877 os: [ubuntu-22.04] - # Use Docker container only for Python 3.6 - container: ${{ matrix.python-version == '3.6' && 'python:3.6' || null }} steps: - uses: actions/checkout@v4.2.2 - uses: actions/setup-python@v5 - if: ${{ matrix.python-version != '3.6' }} with: python-version: ${{ matrix.python-version }} allow-prereleases: true @@ -153,14 +133,8 @@ jobs: run: | set -x # print commands that are executed ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-huggingface_hub" - - name: Generate coverage XML (Python 3.6) - if: ${{ !cancelled() && matrix.python-version == '3.6' }} - run: | - export COVERAGE_RCFILE=.coveragerc36 - coverage combine .coverage-sentry-* - coverage xml --ignore-errors - name: Generate coverage XML - if: ${{ !cancelled() && matrix.python-version != '3.6' }} + if: ${{ !cancelled() }} run: | coverage combine .coverage-sentry-* coverage xml diff --git a/.github/workflows/test-integrations-cloud.yml b/.github/workflows/test-integrations-cloud.yml index e79c9513ef..65f304ed05 100644 --- a/.github/workflows/test-integrations-cloud.yml +++ b/.github/workflows/test-integrations-cloud.yml @@ -30,21 +30,14 @@ jobs: fail-fast: false matrix: python-version: ["3.8","3.11","3.12","3.13"] - # python3.6 reached EOL and is no longer being supported on - # new versions of hosted runners on Github Actions - # 
ubuntu-20.04 is the last version that supported python3.6 - # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877 os: [ubuntu-22.04] services: docker: image: docker:dind # Required for Docker network management options: --privileged # Required for Docker-in-Docker operations - # Use Docker container only for Python 3.6 - container: ${{ matrix.python-version == '3.6' && 'python:3.6' || null }} steps: - uses: actions/checkout@v4.2.2 - uses: actions/setup-python@v5 - if: ${{ matrix.python-version != '3.6' }} with: python-version: ${{ matrix.python-version }} allow-prereleases: true @@ -74,14 +67,8 @@ jobs: run: | set -x # print commands that are executed ./scripts/runtox.sh "py${{ matrix.python-version }}-gcp-latest" - - name: Generate coverage XML (Python 3.6) - if: ${{ !cancelled() && matrix.python-version == '3.6' }} - run: | - export COVERAGE_RCFILE=.coveragerc36 - coverage combine .coverage-sentry-* - coverage xml --ignore-errors - name: Generate coverage XML - if: ${{ !cancelled() && matrix.python-version != '3.6' }} + if: ${{ !cancelled() }} run: | coverage combine .coverage-sentry-* coverage xml @@ -108,22 +95,15 @@ jobs: strategy: fail-fast: false matrix: - python-version: ["3.6","3.7","3.8","3.9","3.11","3.12","3.13"] - # python3.6 reached EOL and is no longer being supported on - # new versions of hosted runners on Github Actions - # ubuntu-20.04 is the last version that supported python3.6 - # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877 + python-version: ["3.7","3.8","3.9","3.11","3.12","3.13"] os: [ubuntu-22.04] services: docker: image: docker:dind # Required for Docker network management options: --privileged # Required for Docker-in-Docker operations - # Use Docker container only for Python 3.6 - container: ${{ matrix.python-version == '3.6' && 'python:3.6' || null }} steps: - uses: actions/checkout@v4.2.2 - uses: actions/setup-python@v5 - if: ${{ matrix.python-version != '3.6' }} with: python-version: ${{ matrix.python-version }} allow-prereleases: true @@ -153,14 +133,8 @@ jobs: run: | set -x # print commands that are executed ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-gcp" - - name: Generate coverage XML (Python 3.6) - if: ${{ !cancelled() && matrix.python-version == '3.6' }} - run: | - export COVERAGE_RCFILE=.coveragerc36 - coverage combine .coverage-sentry-* - coverage xml --ignore-errors - name: Generate coverage XML - if: ${{ !cancelled() && matrix.python-version != '3.6' }} + if: ${{ !cancelled() }} run: | coverage combine .coverage-sentry-* coverage xml diff --git a/.github/workflows/test-integrations-common.yml b/.github/workflows/test-integrations-common.yml index c7e356420c..3405d401fb 100644 --- a/.github/workflows/test-integrations-common.yml +++ b/.github/workflows/test-integrations-common.yml @@ -29,18 +29,11 @@ jobs: strategy: fail-fast: false matrix: - python-version: ["3.6","3.7","3.8","3.9","3.10","3.11","3.12","3.13"] - # python3.6 reached EOL and is no longer being supported on - # new versions of hosted runners on Github Actions - # ubuntu-20.04 is the last version that supported python3.6 - # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877 + python-version: ["3.7","3.8","3.9","3.10","3.11","3.12","3.13"] os: [ubuntu-22.04] - # Use Docker container only for Python 3.6 - container: ${{ matrix.python-version == '3.6' && 'python:3.6' || null }} steps: - uses: actions/checkout@v4.2.2 - uses: actions/setup-python@v5 - if: ${{ 
matrix.python-version != '3.6' }} with: python-version: ${{ matrix.python-version }} allow-prereleases: true @@ -54,14 +47,8 @@ jobs: run: | set -x # print commands that are executed ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-common" - - name: Generate coverage XML (Python 3.6) - if: ${{ !cancelled() && matrix.python-version == '3.6' }} - run: | - export COVERAGE_RCFILE=.coveragerc36 - coverage combine .coverage-sentry-* - coverage xml --ignore-errors - name: Generate coverage XML - if: ${{ !cancelled() && matrix.python-version != '3.6' }} + if: ${{ !cancelled() }} run: | coverage combine .coverage-sentry-* coverage xml diff --git a/.github/workflows/test-integrations-dbs.yml b/.github/workflows/test-integrations-dbs.yml index 6c203379fe..2ba282a85e 100644 --- a/.github/workflows/test-integrations-dbs.yml +++ b/.github/workflows/test-integrations-dbs.yml @@ -29,11 +29,7 @@ jobs: strategy: fail-fast: false matrix: - python-version: ["3.7","3.8","3.11","3.12","3.13"] - # python3.6 reached EOL and is no longer being supported on - # new versions of hosted runners on Github Actions - # ubuntu-20.04 is the last version that supported python3.6 - # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877 + python-version: ["3.8","3.11","3.12"] os: [ubuntu-22.04] services: postgres: @@ -50,15 +46,12 @@ jobs: ports: - 5432:5432 env: - SENTRY_PYTHON_TEST_POSTGRES_HOST: ${{ matrix.python-version == '3.6' && 'postgres' || 'localhost' }} + SENTRY_PYTHON_TEST_POSTGRES_HOST: localhost SENTRY_PYTHON_TEST_POSTGRES_USER: postgres SENTRY_PYTHON_TEST_POSTGRES_PASSWORD: sentry - # Use Docker container only for Python 3.6 - container: ${{ matrix.python-version == '3.6' && 'python:3.6' || null }} steps: - uses: actions/checkout@v4.2.2 - uses: actions/setup-python@v5 - if: ${{ matrix.python-version != '3.6' }} with: python-version: ${{ matrix.python-version }} allow-prereleases: true @@ -94,14 +87,8 @@ jobs: run: | set -x # print commands that are executed ./scripts/runtox.sh "py${{ matrix.python-version }}-sqlalchemy-latest" - - name: Generate coverage XML (Python 3.6) - if: ${{ !cancelled() && matrix.python-version == '3.6' }} - run: | - export COVERAGE_RCFILE=.coveragerc36 - coverage combine .coverage-sentry-* - coverage xml --ignore-errors - name: Generate coverage XML - if: ${{ !cancelled() && matrix.python-version != '3.6' }} + if: ${{ !cancelled() }} run: | coverage combine .coverage-sentry-* coverage xml @@ -128,11 +115,7 @@ jobs: strategy: fail-fast: false matrix: - python-version: ["3.6","3.7","3.8","3.9","3.10","3.11","3.12","3.13"] - # python3.6 reached EOL and is no longer being supported on - # new versions of hosted runners on Github Actions - # ubuntu-20.04 is the last version that supported python3.6 - # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877 + python-version: ["3.7","3.8","3.9","3.10","3.11","3.12","3.13"] os: [ubuntu-22.04] services: postgres: @@ -149,15 +132,12 @@ jobs: ports: - 5432:5432 env: - SENTRY_PYTHON_TEST_POSTGRES_HOST: ${{ matrix.python-version == '3.6' && 'postgres' || 'localhost' }} + SENTRY_PYTHON_TEST_POSTGRES_HOST: localhost SENTRY_PYTHON_TEST_POSTGRES_USER: postgres SENTRY_PYTHON_TEST_POSTGRES_PASSWORD: sentry - # Use Docker container only for Python 3.6 - container: ${{ matrix.python-version == '3.6' && 'python:3.6' || null }} steps: - uses: actions/checkout@v4.2.2 - uses: actions/setup-python@v5 - if: ${{ matrix.python-version != '3.6' }} with: python-version: ${{ matrix.python-version }} 
allow-prereleases: true @@ -193,14 +173,8 @@ jobs: run: | set -x # print commands that are executed ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-sqlalchemy" - - name: Generate coverage XML (Python 3.6) - if: ${{ !cancelled() && matrix.python-version == '3.6' }} - run: | - export COVERAGE_RCFILE=.coveragerc36 - coverage combine .coverage-sentry-* - coverage xml --ignore-errors - name: Generate coverage XML - if: ${{ !cancelled() && matrix.python-version != '3.6' }} + if: ${{ !cancelled() }} run: | coverage combine .coverage-sentry-* coverage xml diff --git a/.github/workflows/test-integrations-flags.yml b/.github/workflows/test-integrations-flags.yml index 926465990d..07f97b4e00 100644 --- a/.github/workflows/test-integrations-flags.yml +++ b/.github/workflows/test-integrations-flags.yml @@ -30,17 +30,10 @@ jobs: fail-fast: false matrix: python-version: ["3.7","3.8","3.9","3.12","3.13"] - # python3.6 reached EOL and is no longer being supported on - # new versions of hosted runners on Github Actions - # ubuntu-20.04 is the last version that supported python3.6 - # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877 os: [ubuntu-22.04] - # Use Docker container only for Python 3.6 - container: ${{ matrix.python-version == '3.6' && 'python:3.6' || null }} steps: - uses: actions/checkout@v4.2.2 - uses: actions/setup-python@v5 - if: ${{ matrix.python-version != '3.6' }} with: python-version: ${{ matrix.python-version }} allow-prereleases: true @@ -66,14 +59,8 @@ jobs: run: | set -x # print commands that are executed ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-unleash" - - name: Generate coverage XML (Python 3.6) - if: ${{ !cancelled() && matrix.python-version == '3.6' }} - run: | - export COVERAGE_RCFILE=.coveragerc36 - coverage combine .coverage-sentry-* - coverage xml --ignore-errors - name: Generate coverage XML - if: ${{ !cancelled() && matrix.python-version != '3.6' }} + if: ${{ !cancelled() }} run: | coverage combine .coverage-sentry-* coverage xml diff --git a/.github/workflows/test-integrations-gevent.yml b/.github/workflows/test-integrations-gevent.yml index a08e91c909..2b41a0b9c8 100644 --- a/.github/workflows/test-integrations-gevent.yml +++ b/.github/workflows/test-integrations-gevent.yml @@ -29,18 +29,11 @@ jobs: strategy: fail-fast: false matrix: - python-version: ["3.6","3.8","3.10","3.11","3.12"] - # python3.6 reached EOL and is no longer being supported on - # new versions of hosted runners on Github Actions - # ubuntu-20.04 is the last version that supported python3.6 - # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877 + python-version: ["3.8","3.10","3.11","3.12"] os: [ubuntu-22.04] - # Use Docker container only for Python 3.6 - container: ${{ matrix.python-version == '3.6' && 'python:3.6' || null }} steps: - uses: actions/checkout@v4.2.2 - uses: actions/setup-python@v5 - if: ${{ matrix.python-version != '3.6' }} with: python-version: ${{ matrix.python-version }} allow-prereleases: true @@ -54,14 +47,8 @@ jobs: run: | set -x # print commands that are executed ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-gevent" - - name: Generate coverage XML (Python 3.6) - if: ${{ !cancelled() && matrix.python-version == '3.6' }} - run: | - export COVERAGE_RCFILE=.coveragerc36 - coverage combine .coverage-sentry-* - coverage xml --ignore-errors - name: Generate coverage XML - if: ${{ !cancelled() && matrix.python-version != '3.6' }} + if: ${{ !cancelled() }} run: | 
coverage combine .coverage-sentry-* coverage xml diff --git a/.github/workflows/test-integrations-graphql.yml b/.github/workflows/test-integrations-graphql.yml index 9bbeee6c6a..54fca489b9 100644 --- a/.github/workflows/test-integrations-graphql.yml +++ b/.github/workflows/test-integrations-graphql.yml @@ -29,18 +29,11 @@ jobs: strategy: fail-fast: false matrix: - python-version: ["3.6","3.7","3.8","3.9","3.10","3.11","3.12","3.13"] - # python3.6 reached EOL and is no longer being supported on - # new versions of hosted runners on Github Actions - # ubuntu-20.04 is the last version that supported python3.6 - # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877 + python-version: ["3.7","3.8","3.9","3.10","3.11","3.12","3.13"] os: [ubuntu-22.04] - # Use Docker container only for Python 3.6 - container: ${{ matrix.python-version == '3.6' && 'python:3.6' || null }} steps: - uses: actions/checkout@v4.2.2 - uses: actions/setup-python@v5 - if: ${{ matrix.python-version != '3.6' }} with: python-version: ${{ matrix.python-version }} allow-prereleases: true @@ -66,14 +59,8 @@ jobs: run: | set -x # print commands that are executed ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-strawberry" - - name: Generate coverage XML (Python 3.6) - if: ${{ !cancelled() && matrix.python-version == '3.6' }} - run: | - export COVERAGE_RCFILE=.coveragerc36 - coverage combine .coverage-sentry-* - coverage xml --ignore-errors - name: Generate coverage XML - if: ${{ !cancelled() && matrix.python-version != '3.6' }} + if: ${{ !cancelled() }} run: | coverage combine .coverage-sentry-* coverage xml diff --git a/.github/workflows/test-integrations-misc.yml b/.github/workflows/test-integrations-misc.yml index 3595640ce1..e77f138546 100644 --- a/.github/workflows/test-integrations-misc.yml +++ b/.github/workflows/test-integrations-misc.yml @@ -29,18 +29,11 @@ jobs: strategy: fail-fast: false matrix: - python-version: ["3.6","3.7","3.8","3.9","3.10","3.11","3.12","3.13"] - # python3.6 reached EOL and is no longer being supported on - # new versions of hosted runners on Github Actions - # ubuntu-20.04 is the last version that supported python3.6 - # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877 + python-version: ["3.7","3.8","3.9","3.10","3.11","3.12","3.13"] os: [ubuntu-22.04] - # Use Docker container only for Python 3.6 - container: ${{ matrix.python-version == '3.6' && 'python:3.6' || null }} steps: - uses: actions/checkout@v4.2.2 - uses: actions/setup-python@v5 - if: ${{ matrix.python-version != '3.6' }} with: python-version: ${{ matrix.python-version }} allow-prereleases: true @@ -74,14 +67,8 @@ jobs: run: | set -x # print commands that are executed ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-typer" - - name: Generate coverage XML (Python 3.6) - if: ${{ !cancelled() && matrix.python-version == '3.6' }} - run: | - export COVERAGE_RCFILE=.coveragerc36 - coverage combine .coverage-sentry-* - coverage xml --ignore-errors - name: Generate coverage XML - if: ${{ !cancelled() && matrix.python-version != '3.6' }} + if: ${{ !cancelled() }} run: | coverage combine .coverage-sentry-* coverage xml diff --git a/.github/workflows/test-integrations-network.yml b/.github/workflows/test-integrations-network.yml index 3ac5508dab..edf1857194 100644 --- a/.github/workflows/test-integrations-network.yml +++ b/.github/workflows/test-integrations-network.yml @@ -30,17 +30,10 @@ jobs: fail-fast: false matrix: python-version: 
["3.9","3.12","3.13"] - # python3.6 reached EOL and is no longer being supported on - # new versions of hosted runners on Github Actions - # ubuntu-20.04 is the last version that supported python3.6 - # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877 os: [ubuntu-22.04] - # Use Docker container only for Python 3.6 - container: ${{ matrix.python-version == '3.6' && 'python:3.6' || null }} steps: - uses: actions/checkout@v4.2.2 - uses: actions/setup-python@v5 - if: ${{ matrix.python-version != '3.6' }} with: python-version: ${{ matrix.python-version }} allow-prereleases: true @@ -62,14 +55,8 @@ jobs: run: | set -x # print commands that are executed ./scripts/runtox.sh "py${{ matrix.python-version }}-requests-latest" - - name: Generate coverage XML (Python 3.6) - if: ${{ !cancelled() && matrix.python-version == '3.6' }} - run: | - export COVERAGE_RCFILE=.coveragerc36 - coverage combine .coverage-sentry-* - coverage xml --ignore-errors - name: Generate coverage XML - if: ${{ !cancelled() && matrix.python-version != '3.6' }} + if: ${{ !cancelled() }} run: | coverage combine .coverage-sentry-* coverage xml @@ -96,18 +83,11 @@ jobs: strategy: fail-fast: false matrix: - python-version: ["3.6","3.7","3.8","3.9","3.10","3.11","3.12","3.13"] - # python3.6 reached EOL and is no longer being supported on - # new versions of hosted runners on Github Actions - # ubuntu-20.04 is the last version that supported python3.6 - # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877 + python-version: ["3.7","3.8","3.9","3.10","3.11","3.12","3.13"] os: [ubuntu-22.04] - # Use Docker container only for Python 3.6 - container: ${{ matrix.python-version == '3.6' && 'python:3.6' || null }} steps: - uses: actions/checkout@v4.2.2 - uses: actions/setup-python@v5 - if: ${{ matrix.python-version != '3.6' }} with: python-version: ${{ matrix.python-version }} allow-prereleases: true @@ -129,14 +109,8 @@ jobs: run: | set -x # print commands that are executed ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-requests" - - name: Generate coverage XML (Python 3.6) - if: ${{ !cancelled() && matrix.python-version == '3.6' }} - run: | - export COVERAGE_RCFILE=.coveragerc36 - coverage combine .coverage-sentry-* - coverage xml --ignore-errors - name: Generate coverage XML - if: ${{ !cancelled() && matrix.python-version != '3.6' }} + if: ${{ !cancelled() }} run: | coverage combine .coverage-sentry-* coverage xml diff --git a/.github/workflows/test-integrations-tasks.yml b/.github/workflows/test-integrations-tasks.yml index 13c34224be..009fbcab13 100644 --- a/.github/workflows/test-integrations-tasks.yml +++ b/.github/workflows/test-integrations-tasks.yml @@ -30,17 +30,10 @@ jobs: fail-fast: false matrix: python-version: ["3.7","3.8","3.10","3.11","3.12","3.13"] - # python3.6 reached EOL and is no longer being supported on - # new versions of hosted runners on Github Actions - # ubuntu-20.04 is the last version that supported python3.6 - # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877 os: [ubuntu-22.04] - # Use Docker container only for Python 3.6 - container: ${{ matrix.python-version == '3.6' && 'python:3.6' || null }} steps: - uses: actions/checkout@v4.2.2 - uses: actions/setup-python@v5 - if: ${{ matrix.python-version != '3.6' }} with: python-version: ${{ matrix.python-version }} allow-prereleases: true @@ -89,14 +82,8 @@ jobs: run: | set -x # print commands that are executed ./scripts/runtox.sh "py${{ matrix.python-version 
}}-spark-latest" - - name: Generate coverage XML (Python 3.6) - if: ${{ !cancelled() && matrix.python-version == '3.6' }} - run: | - export COVERAGE_RCFILE=.coveragerc36 - coverage combine .coverage-sentry-* - coverage xml --ignore-errors - name: Generate coverage XML - if: ${{ !cancelled() && matrix.python-version != '3.6' }} + if: ${{ !cancelled() }} run: | coverage combine .coverage-sentry-* coverage xml @@ -123,18 +110,11 @@ jobs: strategy: fail-fast: false matrix: - python-version: ["3.6","3.7","3.8","3.9","3.10","3.11","3.12","3.13"] - # python3.6 reached EOL and is no longer being supported on - # new versions of hosted runners on Github Actions - # ubuntu-20.04 is the last version that supported python3.6 - # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877 + python-version: ["3.7","3.8","3.9","3.10","3.11","3.12","3.13"] os: [ubuntu-22.04] - # Use Docker container only for Python 3.6 - container: ${{ matrix.python-version == '3.6' && 'python:3.6' || null }} steps: - uses: actions/checkout@v4.2.2 - uses: actions/setup-python@v5 - if: ${{ matrix.python-version != '3.6' }} with: python-version: ${{ matrix.python-version }} allow-prereleases: true @@ -183,14 +163,8 @@ jobs: run: | set -x # print commands that are executed ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-spark" - - name: Generate coverage XML (Python 3.6) - if: ${{ !cancelled() && matrix.python-version == '3.6' }} - run: | - export COVERAGE_RCFILE=.coveragerc36 - coverage combine .coverage-sentry-* - coverage xml --ignore-errors - name: Generate coverage XML - if: ${{ !cancelled() && matrix.python-version != '3.6' }} + if: ${{ !cancelled() }} run: | coverage combine .coverage-sentry-* coverage xml diff --git a/.github/workflows/test-integrations-web-1.yml b/.github/workflows/test-integrations-web-1.yml index e52a903208..7f3e1b104b 100644 --- a/.github/workflows/test-integrations-web-1.yml +++ b/.github/workflows/test-integrations-web-1.yml @@ -29,11 +29,7 @@ jobs: strategy: fail-fast: false matrix: - python-version: ["3.6","3.7","3.8","3.9","3.10","3.11","3.12","3.13"] - # python3.6 reached EOL and is no longer being supported on - # new versions of hosted runners on Github Actions - # ubuntu-20.04 is the last version that supported python3.6 - # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877 + python-version: ["3.7","3.8","3.9","3.10","3.11","3.12","3.13"] os: [ubuntu-22.04] services: postgres: @@ -50,15 +46,12 @@ jobs: ports: - 5432:5432 env: - SENTRY_PYTHON_TEST_POSTGRES_HOST: ${{ matrix.python-version == '3.6' && 'postgres' || 'localhost' }} + SENTRY_PYTHON_TEST_POSTGRES_HOST: localhost SENTRY_PYTHON_TEST_POSTGRES_USER: postgres SENTRY_PYTHON_TEST_POSTGRES_PASSWORD: sentry - # Use Docker container only for Python 3.6 - container: ${{ matrix.python-version == '3.6' && 'python:3.6' || null }} steps: - uses: actions/checkout@v4.2.2 - uses: actions/setup-python@v5 - if: ${{ matrix.python-version != '3.6' }} with: python-version: ${{ matrix.python-version }} allow-prereleases: true @@ -84,14 +77,8 @@ jobs: run: | set -x # print commands that are executed ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-fastapi" - - name: Generate coverage XML (Python 3.6) - if: ${{ !cancelled() && matrix.python-version == '3.6' }} - run: | - export COVERAGE_RCFILE=.coveragerc36 - coverage combine .coverage-sentry-* - coverage xml --ignore-errors - name: Generate coverage XML - if: ${{ !cancelled() && matrix.python-version != '3.6' }} + 
if: ${{ !cancelled() }} run: | coverage combine .coverage-sentry-* coverage xml diff --git a/.github/workflows/test-integrations-web-2.yml b/.github/workflows/test-integrations-web-2.yml index c703cfafce..2477e5925f 100644 --- a/.github/workflows/test-integrations-web-2.yml +++ b/.github/workflows/test-integrations-web-2.yml @@ -30,17 +30,10 @@ jobs: fail-fast: false matrix: python-version: ["3.8","3.9","3.12","3.13"] - # python3.6 reached EOL and is no longer being supported on - # new versions of hosted runners on Github Actions - # ubuntu-20.04 is the last version that supported python3.6 - # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877 os: [ubuntu-22.04] - # Use Docker container only for Python 3.6 - container: ${{ matrix.python-version == '3.6' && 'python:3.6' || null }} steps: - uses: actions/checkout@v4.2.2 - uses: actions/setup-python@v5 - if: ${{ matrix.python-version != '3.6' }} with: python-version: ${{ matrix.python-version }} allow-prereleases: true @@ -90,14 +83,8 @@ jobs: run: | set -x # print commands that are executed ./scripts/runtox.sh "py${{ matrix.python-version }}-tornado-latest" - - name: Generate coverage XML (Python 3.6) - if: ${{ !cancelled() && matrix.python-version == '3.6' }} - run: | - export COVERAGE_RCFILE=.coveragerc36 - coverage combine .coverage-sentry-* - coverage xml --ignore-errors - name: Generate coverage XML - if: ${{ !cancelled() && matrix.python-version != '3.6' }} + if: ${{ !cancelled() }} run: | coverage combine .coverage-sentry-* coverage xml @@ -124,18 +111,11 @@ jobs: strategy: fail-fast: false matrix: - python-version: ["3.6","3.7","3.8","3.9","3.10","3.11","3.12","3.13"] - # python3.6 reached EOL and is no longer being supported on - # new versions of hosted runners on Github Actions - # ubuntu-20.04 is the last version that supported python3.6 - # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877 + python-version: ["3.7","3.8","3.9","3.10","3.11","3.12","3.13"] os: [ubuntu-22.04] - # Use Docker container only for Python 3.6 - container: ${{ matrix.python-version == '3.6' && 'python:3.6' || null }} steps: - uses: actions/checkout@v4.2.2 - uses: actions/setup-python@v5 - if: ${{ matrix.python-version != '3.6' }} with: python-version: ${{ matrix.python-version }} allow-prereleases: true @@ -185,14 +165,8 @@ jobs: run: | set -x # print commands that are executed ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-tornado" - - name: Generate coverage XML (Python 3.6) - if: ${{ !cancelled() && matrix.python-version == '3.6' }} - run: | - export COVERAGE_RCFILE=.coveragerc36 - coverage combine .coverage-sentry-* - coverage xml --ignore-errors - name: Generate coverage XML - if: ${{ !cancelled() && matrix.python-version != '3.6' }} + if: ${{ !cancelled() }} run: | coverage combine .coverage-sentry-* coverage xml diff --git a/.gitignore b/.gitignore index 0dad53b2f4..e6dfb71607 100644 --- a/.gitignore +++ b/.gitignore @@ -4,6 +4,7 @@ *.db *.pid .python-version +.tool-versions .coverage .coverage-sentry* coverage.xml @@ -28,6 +29,7 @@ relay pip-wheel-metadata .mypy_cache .vscode/ +toxgen.venv # for running AWS Lambda tests using AWS SAM sam.template.yaml diff --git a/CHANGELOG.md b/CHANGELOG.md index 21b1d5fec9..93e61a8363 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,69 @@ # Changelog +## 3.0.0a5 + +### Various fixes & improvements + +- Add empty tags by default to transaction (#4683) by @sl0thentr0py +- Force creation of a new trace in continue_trace with 
empty headers (#4682) by @sl0thentr0py +- mypy (#3152) by @sl0thentr0py +- Add `update_current_span` to SDK 3 (#4679) by @sl0thentr0py +- feat(tracing): Add convenience function `update_current_span`. (#4673) by @antonpirker +- Update `gen_ai.*` and `ai.*` attributes (#4665) by @antonpirker +- Add `update_data` to `Span`. (#4666) by @antonpirker +- Fix plugins key codecov (#4655) by @sl0thentr0py + +## 3.0.0a4 + +### Various fixes & improvements + +- Migrate more type comments to annotations (#4651) by @sl0thentr0py +- ref: Drop experimental logs options in 3.0 (#4653) by @sl0thentr0py +- Polish migration guide (#4650) by @sl0thentr0py +- Add `enable_logs`, `before_send_log` as top-level options (#4644) by @sentrivana +- Add missing return type annotation (#3152) by @sl0thentr0py +- Fix mypy (#4649) by @sentrivana +- Better checking for empty tools list (#4647) by @antonpirker +- ref: Remove `MAX_EVENT_BYTES` (#4630) by @sl0thentr0py +- update changelog (9276f2a1) by @antonpirker +- release: 2.34.1 (a71ef66d) by @getsentry-bot +- typing (#3152) by @sl0thentr0py +- Update tests (#3152) by @sl0thentr0py +- Span data is always be a primitive data type (#4643) by @antonpirker +- Fix typo in CHANGELOG.md (#4640) by @jgillard +- updated test matrix (#3152) by @sl0thentr0py +- Add new_trace api to force a new trace (#4642) by @sl0thentr0py +- Revert "Add new_trace api to force a new trace" (#3152) by @sl0thentr0py +- Add new_trace api to force a new trace (#3152) by @sl0thentr0py +- Update changelog (72766a79) by @antonpirker +- Update CHANGELOG.md (e1848d4f) by @sentrivana +- release: 2.34.0 (e84f6f30) by @getsentry-bot +- Considerably raise `DEFAULT_MAX_VALUE_LENGTH` (#4632) by @sentrivana +- fix(celery): Latency should be in milliseconds, not seconds (#4637) by @sentrivana +- OpenAI integration update (#4612) by @antonpirker + +_Plus 16 more_ + +## 3.0.0a3 + +We're excited to announce that version 3.0 of the Sentry Python SDK is now +available. This release is the result of a long-term effort to use OpenTelemetry +under the hood for tracing. This switch opens the door for us to leverage the +full power of OpenTelemetry, so stay tuned for more integrations and features +in future releases. + +Looking to upgrade from Sentry SDK 2.x to 3.x? See the +[full list of changes](MIGRATION_GUIDE.md) for a comprehensive overview +of what's changed. Looking for a more digestible summary? See the +[migration guide in the docs](https://docs.sentry.io/platforms/python/migration/2.x-to-3.x) +with the most common migration patterns. + +⚠️ This is a pre-release. If you feel like taking it for a spin, we'd be grateful +for your feedback. How was the migration? Is everything working as expected? Is +*nothing* working as expected? Something in between? Please let us know +[on GitHub](https://github.com/getsentry/sentry-python/discussions/3936) or +[on Discord](https://discord.com/invite/Ww9hbqr). + ## 2.34.1 ### Various fixes & improvements diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 024a374f85..1d3c594025 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -163,12 +163,12 @@ This project follows [semver](https://semver.org/), with three additions: - Certain features (e.g. integrations) may be explicitly called out as "experimental" or "unstable" in the documentation. They come with their own versioning policy described in the documentation. -We recommend to pin your version requirements against `2.x.*` or `2.x.y`. +We recommend to pin your version requirements against `3.x.*` or `3.x.y`. 
Either one of the following is fine:

```
-sentry-sdk>=2.0.0,<3.0.0
-sentry-sdk==2.4.0
+sentry-sdk>=3.0.0,<4.0.0
+sentry-sdk==3.5.0
```

A major release `N` implies the previous release `N-1` will no longer receive updates. We generally do not backport bugfixes to older versions unless they are security relevant. However, feel free to ask for backports of specific commits on the bugtracker.
diff --git a/MIGRATION_GUIDE.md b/MIGRATION_GUIDE.md
index 53396a37ba..a957c67ed8 100644
--- a/MIGRATION_GUIDE.md
+++ b/MIGRATION_GUIDE.md
@@ -1,13 +1,265 @@
-# Sentry SDK 2.0 Migration Guide
+# Sentry SDK Migration Guide
-Looking to upgrade from Sentry SDK 1.x to 2.x? Here's a comprehensive list of what's changed. Looking for a more digestable summary? See the [guide in the docs](https://docs.sentry.io/platforms/python/migration/1.x-to-2.x) with the most common migration patterns.
+## Upgrading to 3.0
-## New Features
+Looking to upgrade from Sentry SDK 2.x to 3.x? Here's a comprehensive list of what's changed. Looking for a more digestible summary? See the [guide in the docs](https://docs.sentry.io/platforms/python/migration/2.x-to-3.x) with the most common migration patterns.
+
+### New Features
+
+- Added `add_attachment()` as a top-level API, so you can now do: `sentry_sdk.add_attachment(...)` (up until now it was only available on the `Scope`).
+- Added a new SDK option `exclude_span_origins`. Spans with an `origin` from `exclude_span_origins` won't be created. This can be used, for example, in dual OpenTelemetry/Sentry setups to filter out spans from specific Sentry instrumentations. Note that using `exclude_span_origins` might lead to surprising results: if, for example, a root span is excluded based on `origin`, all of its children will become root spans, unless they were started with `only_as_child_span=True`.
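+
+For illustration, here's a minimal sketch of both new features together (the DSN and the origin value are placeholders, not prescribed values):
+
+```python
+import sentry_sdk
+
+sentry_sdk.init(
+    dsn="https://examplePublicKey@o0.ingest.sentry.io/0",  # placeholder DSN
+    # Drop spans coming from a specific instrumentation, identified by
+    # its origin. "auto.http.requests" is only an example value.
+    exclude_span_origins=["auto.http.requests"],
+)
+
+# Attach a file to future events without going through the Scope.
+sentry_sdk.add_attachment(path="/tmp/debug.log")
+```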
+
+### Changed
+
+#### General
+
+- The SDK now supports Python 3.7 and higher.
+- Tag values on event dictionaries, which are passed to `before_send` and `before_send_transaction`, are now always `str` values. Previously, despite tag values being typed as `str`, they often held values of other types. Therefore, if you have configured any `before_send` or `before_send_transaction` functions that perform logic based on tag values, you may need to update them to handle `str` values correctly.
+
+#### Error Capturing
+
+- We updated how we handle `ExceptionGroup`s. You will now get more data if `ExceptionGroup`s appear in chained exceptions. Because of this, the grouping of issues may change after updating the SDK, so you may temporarily see the same exception in two Sentry issues (one from before the update, one from after the update).
+
+#### Tracing
+
+- The default of `traces_sample_rate` changed to `0`. This means incoming traces are continued by default: for example, if your frontend sends a `sentry-trace`/`baggage` header pair, your SDK will create spans and send them to Sentry. (The default used to be `None`, meaning that by default no spans were created, no matter what headers the frontend sent to your project.) See also: https://docs.sentry.io/platforms/python/configuration/options/#traces_sample_rate
+- `sentry_sdk.start_span` now only takes keyword arguments.
+- `sentry_sdk.start_transaction`/`sentry_sdk.start_span` no longer take the following arguments: `span`, `parent_sampled`, `trace_id`, `span_id` or `parent_span_id`.
+- `sentry_sdk.continue_trace` no longer returns a `Transaction` and is now a context manager.
+
+  - Use it to continue an upstream trace with the `sentry-trace` and `baggage` headers.
+
+    ```python
+    headers = {"sentry-trace": "{trace_id}-{span_id}-{sampled_flag}", "baggage": "{baggage header}"}
+    with sentry_sdk.continue_trace(headers):
+        with sentry_sdk.start_span(name="continued span in trace"):
+            pass
+    ```
+
+  - If the headers are empty, a new trace will be started.
+  - If you want to force creation of a new trace, use the `sentry_sdk.new_trace` context manager (see the sketch at the end of this section).
+
+- You can no longer change the sampled status of a span with `span.sampled = False` after starting it. The sampling decision needs to either be made in the `traces_sampler`, or you need to pass an explicit `sampled` parameter to `start_span`.
+- `sentry_sdk.start_span` now takes an optional `only_as_child_span` argument. These spans will not be started if they would be root spans -- they can only exist as child spans. You can use this parameter to prevent spans without a parent from becoming root spans.
+- The `Span()` constructor does not accept a `hub` parameter anymore.
+- `Span.finish()` does not accept a `hub` parameter anymore.
+- `Span.finish()` no longer returns the `event_id` if the event is sent to Sentry.
+- The `sampling_context` argument of `traces_sampler` now additionally contains all span attributes known at span start.
+- The `SentrySpanProcessor` and `SentryPropagator` are exported from `sentry_sdk.opentelemetry` instead of `sentry_sdk.integrations.opentelemetry`.
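+
+As referenced above, here's a minimal sketch of forcing a new trace with `sentry_sdk.new_trace`:
+
+```python
+import sentry_sdk
+
+# Even if an incoming trace was continued, the work inside this block
+# is reported under a fresh trace.
+with sentry_sdk.new_trace():
+    with sentry_sdk.start_span(name="isolated background work"):
+        ...
+```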
+
+#### Profiling
+
+- The `sampling_context` argument of `profiles_sampler` now additionally contains all span attributes known at span start.
+- The `Profile()` constructor does not accept a `hub` parameter anymore.
+- A `Profile` object does not have a `.hub` property anymore.
+- `MAX_PROFILE_DURATION_NS`, `PROFILE_MINIMUM_SAMPLES`, `Profile`, `Scheduler`, `ThreadScheduler`, `GeventScheduler`, `has_profiling_enabled`, `setup_profiler`, `teardown_profiler` are no longer accessible from `sentry_sdk.profiler`. They're still accessible from `sentry_sdk.profiler.transaction_profiler`.
+- `DEFAULT_SAMPLING_FREQUENCY`, `MAX_STACK_DEPTH`, `get_frame_name`, `extract_frame`, `extract_stack`, `frame_id` are no longer accessible from `sentry_sdk.profiler`. They're still accessible from `sentry_sdk.profiler.utils`.
+
+#### Logs
+
+- `enable_logs` and `before_send_log` are now regular SDK options. Their original versions under `_experiments` have been removed.
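+
+For example, a sketch of the new top-level options (the filtering condition and log fields are illustrative):
+
+```python
+import sentry_sdk
+
+def before_send_log(log, hint):
+    # Return None to drop a log, or return the log to keep it.
+    if log.get("severity_text") == "debug":
+        return None
+    return log
+
+sentry_sdk.init(
+    enable_logs=True,                 # was _experiments={"enable_logs": True}
+    before_send_log=before_send_log,  # was under _experiments as well
+)
+```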
+
+#### Integrations
+
+- AWS Lambda, GCP: The message of the warning the SDK optionally emits if a function is about to time out has changed.
+- Redis: Redis pipeline spans no longer contain `span["data"]["redis.commands"]` with a dict `{"count": 3, "first_ten": ["cmd1", "cmd2", ...]}`. Instead, there is `span["data"]["redis.commands.count"]` (containing `3`) and `span["data"]["redis.commands.first_ten"]` (containing `["cmd1", "cmd2", ...]`).
+- clickhouse-driver: The query is now available under the `db.query.text` span attribute (only if `send_default_pii` is `True`).
+- Logging: By default, the SDK won't capture Sentry issues anymore when calling `logging.error()`, `logging.critical()` or `logging.exception()`. If you want to preserve the old behavior, use `sentry_sdk.init(integrations=[LoggingIntegration(event_level="ERROR")])`.
+- The integration-specific content of the `sampling_context` argument of `traces_sampler` and `profiles_sampler` now looks different (see the sketch after the tables below).
+
+  - The Celery integration doesn't add the `celery_job` dictionary anymore. Instead, the individual keys are now available as:
+
+    | Dictionary keys        | Sampling context key        | Example                        |
+    | ---------------------- | --------------------------- | ------------------------------ |
+    | `celery_job["args"]`   | `celery.job.args.{index}`   | `celery.job.args.0`            |
+    | `celery_job["kwargs"]` | `celery.job.kwargs.{kwarg}` | `celery.job.kwargs.kwarg_name` |
+    | `celery_job["task"]`   | `celery.job.task`           |                                |
+
+    Note that all of these are serialized, i.e., not the original `args` and `kwargs` but rather OpenTelemetry-friendly span attributes.
+
+  - The AIOHTTP integration doesn't add the `aiohttp_request` object anymore. Instead, some of the individual properties of the request are accessible, if available, as follows:
+
+    | Request property  | Sampling context key(s)         |
+    | ----------------- | ------------------------------- |
+    | `path`            | `url.path`                      |
+    | `query_string`    | `url.query`                     |
+    | `method`          | `http.request.method`           |
+    | `host`            | `server.address`, `server.port` |
+    | `scheme`          | `url.scheme`                    |
+    | full URL          | `url.full`                      |
+    | `request.headers` | `http.request.header.{header}`  |
+
+  - The Tornado integration doesn't add the `tornado_request` object anymore. Instead, some of the individual properties of the request are accessible, if available, as follows:
+
+    | Request property  | Sampling context key(s)                             |
+    | ----------------- | --------------------------------------------------- |
+    | `path`            | `url.path`                                          |
+    | `query`           | `url.query`                                         |
+    | `protocol`        | `url.scheme`                                        |
+    | `method`          | `http.request.method`                               |
+    | `host`            | `server.address`, `server.port`                     |
+    | `version`         | `network.protocol.name`, `network.protocol.version` |
+    | full URL          | `url.full`                                          |
+    | `request.headers` | `http.request.header.{header}`                      |
+
+  - The WSGI integration doesn't add the `wsgi_environ` object anymore. Instead, the individual properties of the environment are accessible, if available, as follows:
+
+    | Env property      | Sampling context key(s)                           |
+    | ----------------- | ------------------------------------------------- |
+    | `PATH_INFO`       | `url.path`                                        |
+    | `QUERY_STRING`    | `url.query`                                       |
+    | `REQUEST_METHOD`  | `http.request.method`                             |
+    | `SERVER_NAME`     | `server.address`                                  |
+    | `SERVER_PORT`     | `server.port`                                     |
+    | `SERVER_PROTOCOL` | `server.protocol.name`, `server.protocol.version` |
+    | `wsgi.url_scheme` | `url.scheme`                                      |
+    | full URL          | `url.full`                                        |
+    | `HTTP_*`          | `http.request.header.{header}`                    |
+
+  - The ASGI integration doesn't add the `asgi_scope` object anymore. Instead, the individual properties of the scope, if available, are accessible as follows:
+
+    | Scope property | Sampling context key(s)         |
+    | -------------- | ------------------------------- |
+    | `type`         | `network.protocol.name`         |
+    | `scheme`       | `url.scheme`                    |
+    | `path`         | `url.path`                      |
+    | `query`        | `url.query`                     |
+    | `http_version` | `network.protocol.version`      |
+    | `method`       | `http.request.method`           |
+    | `server`       | `server.address`, `server.port` |
+    | `client`       | `client.address`, `client.port` |
+    | full URL       | `url.full`                      |
+    | `headers`      | `http.request.header.{header}`  |
+
+  - The RQ integration doesn't add the `rq_job` object anymore. Instead, the individual properties of the job and the queue, if available, are accessible as follows:
+
+    | RQ property     | Sampling context key         | Example                  |
+    | --------------- | ---------------------------- | ------------------------ |
+    | `rq_job.args`   | `rq.job.args.{index}`        | `rq.job.args.0`          |
+    | `rq_job.kwargs` | `rq.job.kwargs.{kwarg}`      | `rq.job.kwargs.my_kwarg` |
+    | `rq_job.func`   | `rq.job.func`                |                          |
+    | `queue.name`    | `messaging.destination.name` |                          |
+    | `rq_job.id`     | `messaging.message.id`       |                          |
+
+    Note that `rq.job.args`, `rq.job.kwargs`, and `rq.job.func` are serialized and not the actual objects on the job.
+
+  - The AWS Lambda integration doesn't add the `aws_event` and `aws_context` objects anymore. Instead, the following, if available, is accessible:
+
+    | AWS property                                | Sampling context key(s)        |
+    | ------------------------------------------- | ------------------------------ |
+    | `aws_event["httpMethod"]`                   | `http.request.method`          |
+    | `aws_event["queryStringParameters"]`        | `url.query`                    |
+    | `aws_event["path"]`                         | `url.path`                     |
+    | full URL                                    | `url.full`                     |
+    | `aws_event["headers"]["X-Forwarded-Proto"]` | `network.protocol.name`        |
+    | `aws_event["headers"]["Host"]`              | `server.address`               |
+    | `aws_context["function_name"]`              | `faas.name`                    |
+    | `aws_event["headers"]`                      | `http.request.header.{header}` |
+
+  - The GCP integration doesn't add the `gcp_env` and `gcp_event` keys anymore. Instead, the following, if available, is accessible:
+
+    | Old sampling context key          | New sampling context key       |
+    | --------------------------------- | ------------------------------ |
+    | `gcp_env["function_name"]`        | `faas.name`                    |
+    | `gcp_env["function_region"]`      | `faas.region`                  |
+    | `gcp_env["function_project"]`     | `gcp.function.project`         |
+    | `gcp_env["function_identity"]`    | `gcp.function.identity`        |
+    | `gcp_env["function_entry_point"]` | `gcp.function.entry_point`     |
+    | `gcp_event.method`                | `http.request.method`          |
+    | `gcp_event.query_string`          | `url.query`                    |
+    | `gcp_event.headers`               | `http.request.header.{header}` |
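+
+As a rough sketch of what this means in practice (the attribute keys come from the tables above; which ones are present depends on the integration):
+
+```python
+import sentry_sdk
+
+def traces_sampler(sampling_context):
+    # Span attributes such as url.path or http.request.method are now
+    # top-level keys in sampling_context instead of nested objects.
+    if sampling_context.get("url.path", "").startswith("/health"):
+        return 0  # drop health checks
+    return 0.5
+
+sentry_sdk.init(traces_sampler=traces_sampler)
+```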
+
+#### Internals
+
+- The `sentry_sdk.Scope()` constructor no longer accepts a `client` parameter.
+- `sentry_sdk.init` now returns `None` instead of a context manager.
+
+### Removed
+
+#### General
+
+- Dropped support for Python 3.6.
+- `set_measurement` has been removed.
+- Setting `Scope.user` directly is no longer supported. Use `Scope.set_user()` instead.
+
+#### Tracing
+
+- The `enable_tracing` `init` option has been removed. Configure `traces_sample_rate` directly.
+- The `propagate_traces` `init` option has been removed. Use `trace_propagation_targets` instead.
+- The `custom_sampling_context` parameter of `start_transaction` has been removed. Use `attributes` instead to set key-value pairs of data that should be accessible in the traces sampler. Note that span attributes need to conform to the [OpenTelemetry specification](https://opentelemetry.io/docs/concepts/signals/traces/#attributes), meaning only certain types can be set as values.
+- When setting span status, the HTTP status code is no longer automatically added as a tag.
+- `start_transaction` is deprecated and no longer takes the following arguments:
+  - `trace_id`, `baggage`: use `continue_trace` for propagation from headers or environment variables
+  - `same_process_as_parent`
+  - `span_id`
+  - `parent_span_id`: you can supply a `parent_span` instead
+- The `Scope.transaction` property has been removed. To obtain the root span (previously transaction), use `Scope.root_span`. To set the root span's (transaction's) name, use `Scope.set_transaction_name()`.
+- The `Scope.span =` setter has been removed. Please use the new `span.activate()` API if you want to activate a span manually rather than using the `start_span` context manager.
+- `span.containing_transaction` has been removed. Use `span.root_span` instead.
+- `continue_from_headers`, `continue_from_environ` and `from_traceparent` have been removed; please use the top-level API `sentry_sdk.continue_trace` instead.
+- `Baggage.populate_from_transaction` has been removed.
+
+#### Integrations
+
+- PyMongo: The integration no longer sets tags. The data is still accessible via span attributes.
+- PyMongo: The integration doesn't set `operation_ids` anymore. The individual IDs (`operation_id`, `request_id`, `session_id`) are now accessible as separate span attributes.
+- Django: Dropped support for Django versions below 2.0.
+- trytond: Dropped support for trytond versions below 5.0.
+- Falcon: Dropped support for Falcon versions below 3.0.
+- eventlet: Dropped support for eventlet completely.
+- Threading: The integration no longer takes the `propagate_hub` argument.
+- Starlette: Passing a list or `None` for `failed_request_status_codes` is no longer supported. Pass a set of integers instead.
+
+#### Profiling
+
+- `profiles_sample_rate` and `profiler_mode` were removed from options available via `_experiments`. Use the top-level `profiles_sample_rate` and `profiler_mode` options instead.
+
+#### Transport
+
+- `Transport.capture_event` has been removed. Use `Transport.capture_envelope` instead.
+- Function transports are no longer supported. Subclass the `Transport` instead.
+
+#### Sessions
+
+- The context manager `auto_session_tracking()` has been removed. Use `track_session()` instead.
+- The context manager `auto_session_tracking_scope()` has been removed. Use `track_session()` instead.
+- Utility function `is_auto_session_tracking_enabled()` has been removed. There is no public replacement. There is a private `_is_auto_session_tracking_enabled()` (if you absolutely need this function). It accepts a `scope` parameter instead of the previously used `hub` parameter.
+- Utility function `is_auto_session_tracking_enabled_scope()` has been removed. There is no public replacement. There is a private `_is_auto_session_tracking_enabled()` (if you absolutely need this function).
+
+#### Metrics
+
+- `sentry_sdk.metrics` and associated metrics APIs have been removed as Sentry no longer accepts metrics data in this form. See https://sentry.zendesk.com/hc/en-us/articles/26369339769883-Upcoming-API-Changes-to-Metrics
+- The experimental options `enable_metrics`, `before_emit_metric` and `metric_code_locations` have been removed.
+
+#### Internals
+
+- Class `Hub` has been removed.
+- Class `_ScopeManager` has been removed.
+- `PropagationContext` constructor no longer takes a `dynamic_sampling_context` but takes a `baggage` object instead.
+- Setting `scope.level` has been removed. Use `scope.set_level` instead.
+- `debug.configure_debug_hub` was removed.
+- The `span` argument of `Scope.trace_propagation_meta` is no longer supported.
+
+
+### Deprecated
+
+- `sentry_sdk.start_transaction()` is deprecated. Use `sentry_sdk.start_span()` instead.
+- `Span.set_data()` is deprecated. Use `Span.set_attribute()` instead.
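+
+For instance, migrating the deprecated calls might look like this (a sketch; the span name and attribute are illustrative):
+
+```python
+import sentry_sdk
+
+# 2.x: with sentry_sdk.start_transaction(name="checkout") as transaction: ...
+with sentry_sdk.start_span(name="checkout") as span:
+    # 2.x: span.set_data("cart_size", 3)
+    span.set_attribute("cart_size", 3)
+```
+
+
+---------------------------------------------------------------------------------
+
+
+## Upgrading to 2.0
+
+Looking to upgrade from Sentry SDK 1.x to 2.x?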
Here's a comprehensive list of what's changed. Looking for a more digestible summary? See the [guide in the docs](https://docs.sentry.io/platforms/python/migration/1.x-to-2.x) with the most common migration patterns. + +### New Features - Additional integrations will now be activated automatically if the SDK detects the respective package is installed: Ariadne, ARQ, asyncpg, Chalice, clickhouse-driver, GQL, Graphene, huey, Loguru, PyMongo, Quart, Starlite, Strawberry. - While refactoring the [inner workings](https://docs.sentry.io/platforms/python/enriching-events/scopes/) of the SDK we added new top-level APIs for custom instrumentation called `new_scope` and `isolation_scope`. See the [Deprecated](#deprecated) section to see how they map to the existing APIs. -## Changed +### Changed - The Pyramid integration will not capture errors that might happen in `authenticated_userid()` in a custom `AuthenticationPolicy` class. - The method `need_code_loation` of the `MetricsAggregator` was renamed to `need_code_location`. @@ -19,7 +271,7 @@ Looking to upgrade from Sentry SDK 1.x to 2.x? Here's a comprehensive list of wh - The actual implementation of `get_current_span` was moved to `sentry_sdk.tracing_utils`. `sentry_sdk.get_current_span` is still accessible as part of the top-level API. - `sentry_sdk.tracing_utils.add_query_source()`: Removed the `hub` parameter. It is not necessary anymore. - `sentry_sdk.tracing_utils.record_sql_queries()`: Removed the `hub` parameter. It is not necessary anymore. -- `sentry_sdk.tracing_utils.get_current_span()` does now take a `scope` instead of a `hub` as parameter. +- `sentry_sdk.tracing_utils.get_current_span()` now takes a `scope` instead of a `hub` as parameter. - `sentry_sdk.tracing_utils.should_propagate_trace()` now takes a `Client` instead of a `Hub` as first parameter. - `sentry_sdk.utils.is_sentry_url()` now takes a `Client` instead of a `Hub` as first parameter. - `sentry_sdk.utils._get_contextvars` does not return a tuple with three values, but a tuple with two values. The `copy_context` was removed. @@ -59,7 +311,7 @@ Looking to upgrade from Sentry SDK 1.x to 2.x? Here's a comprehensive list of wh -## Removed +### Removed - Removed support for Python 2 and Python 3.5. The SDK now requires at least Python 3.6. - Removed support for Celery 3.\*. @@ -82,7 +334,7 @@ Looking to upgrade from Sentry SDK 1.x to 2.x? Here's a comprehensive list of wh - Removed the experimental `metrics_summary_sample_rate` config option. - Removed the experimental `should_summarize_metric` config option. -## Deprecated +### Deprecated - Using the `Hub` directly as well as using hub-based APIs has been deprecated. Where available, use [the top-level API instead](sentry_sdk/api.py); otherwise use the [scope API](sentry_sdk/scope.py) or the [client API](sentry_sdk/client.py). diff --git a/constraints.txt b/constraints.txt new file mode 100644 index 0000000000..697aca1388 --- /dev/null +++ b/constraints.txt @@ -0,0 +1,3 @@ +# Workaround for https://github.com/pypa/setuptools/issues/4519. +# Applies only for Django tests. +setuptools<72.0.0 diff --git a/docs/api.rst b/docs/api.rst index 7d59030033..c09b5e9468 100644 --- a/docs/api.rst +++ b/docs/api.rst @@ -61,7 +61,4 @@ Client Management Managing Scope (advanced) ========================= -.. autofunction:: sentry_sdk.api.configure_scope -.. autofunction:: sentry_sdk.api.push_scope - .. 
autofunction:: sentry_sdk.api.new_scope diff --git a/docs/apidocs.rst b/docs/apidocs.rst index a3c8a6e150..ffe265b276 100644 --- a/docs/apidocs.rst +++ b/docs/apidocs.rst @@ -2,9 +2,6 @@ API Docs ======== -.. autoclass:: sentry_sdk.Hub - :members: - .. autoclass:: sentry_sdk.Scope :members: diff --git a/docs/conf.py b/docs/conf.py index f5d0b9e121..2485aefd97 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -13,6 +13,8 @@ import sphinx.ext.autodoc # noqa: F401 import sphinx.ext.intersphinx # noqa: F401 import urllib3.exceptions # noqa: F401 +import importlib_metadata # noqa: F401 +import opentelemetry.sdk.metrics._internal # noqa: F401 typing.TYPE_CHECKING = True @@ -31,7 +33,7 @@ copyright = "2019-{}, Sentry Team and Contributors".format(datetime.now().year) author = "Sentry Team and Contributors" -release = "2.34.1" +release = "3.0.0a5" version = ".".join(release.split(".")[:2]) # The short X.Y version. diff --git a/pyproject.toml b/pyproject.toml index e5eae2c21f..d9993a099a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -214,3 +214,6 @@ exclude = [ "grpc_test_service_pb2.py", "grpc_test_service_pb2_grpc.py", ] +per-file-ignores = [ + "sentry_sdk/integrations/spark/*:N802,N803", +] diff --git a/requirements-aws-lambda-layer.txt b/requirements-aws-lambda-layer.txt index 8a6ff63aa7..f99908dd89 100644 --- a/requirements-aws-lambda-layer.txt +++ b/requirements-aws-lambda-layer.txt @@ -6,3 +6,5 @@ urllib3 # So we pin this here to make our Lambda layer work with # Lambda Function using Python 3.7+ urllib3<1.27; python_version < "3.10" + +opentelemetry-distro>=0.35b0 diff --git a/requirements-testing.txt b/requirements-testing.txt index 5cd669af9a..8e7bc47be0 100644 --- a/requirements-testing.txt +++ b/requirements-testing.txt @@ -13,5 +13,6 @@ pysocks socksio httpcore[http2] setuptools +freezegun Brotli docker diff --git a/scripts/build_aws_lambda_layer.py b/scripts/build_aws_lambda_layer.py index a7e2397546..faace9e50e 100644 --- a/scripts/build_aws_lambda_layer.py +++ b/scripts/build_aws_lambda_layer.py @@ -3,12 +3,10 @@ import subprocess import sys import tempfile -from typing import TYPE_CHECKING +from typing import Optional from sentry_sdk.consts import VERSION as SDK_VERSION -if TYPE_CHECKING: - from typing import Optional DIST_PATH = "dist" # created by "make dist" that is called by "make aws-lambda-layer" PYTHON_SITE_PACKAGES = "python" # see https://docs.aws.amazon.com/lambda/latest/dg/configuration-layers.html#configuration-layers-path @@ -17,10 +15,9 @@ class LayerBuilder: def __init__( self, - base_dir, # type: str - out_zip_filename=None, # type: Optional[str] - ): - # type: (...) -> None + base_dir: str, + out_zip_filename: Optional[str] = None, + ) -> None: self.base_dir = base_dir self.python_site_packages = os.path.join(self.base_dir, PYTHON_SITE_PACKAGES) self.out_zip_filename = ( @@ -29,12 +26,10 @@ def __init__( else out_zip_filename ) - def make_directories(self): - # type: (...) -> None + def make_directories(self) -> None: os.makedirs(self.python_site_packages) - def install_python_packages(self): - # type: (...) -> None + def install_python_packages(self) -> None: # Install requirements for Lambda Layer (these are more limited than the SDK requirements, # because Lambda does not support the newest versions of some packages) subprocess.check_call( @@ -68,8 +63,7 @@ def install_python_packages(self): check=True, ) - def create_init_serverless_sdk_package(self): - # type: (...) 
-> None + def create_init_serverless_sdk_package(self) -> None: """ Method that creates the init_serverless_sdk pkg in the sentry-python-serverless zip """ @@ -84,8 +78,7 @@ def create_init_serverless_sdk_package(self): "scripts/init_serverless_sdk.py", f"{serverless_sdk_path}/__init__.py" ) - def zip(self): - # type: (...) -> None + def zip(self) -> None: subprocess.run( [ "zip", diff --git a/scripts/init_serverless_sdk.py b/scripts/init_serverless_sdk.py index 9b4412c420..341d8d6832 100644 --- a/scripts/init_serverless_sdk.py +++ b/scripts/init_serverless_sdk.py @@ -9,15 +9,11 @@ import os import sys import re +from typing import Any import sentry_sdk from sentry_sdk.integrations.aws_lambda import AwsLambdaIntegration -from typing import TYPE_CHECKING - -if TYPE_CHECKING: - from typing import Any - # Configure Sentry SDK sentry_sdk.init( @@ -50,8 +46,8 @@ def extract_and_load_lambda_function_module(self, module_path): module_name = module_path.split(os.path.sep)[-1] module_file_path = module_path + ".py" - # Supported python versions are 3.6, 3.7, 3.8 - if py_version >= (3, 6): + # Supported python versions are 3.7, 3.8 + if py_version >= (3, 7): import importlib.util spec = importlib.util.spec_from_file_location( @@ -70,8 +66,7 @@ def get_lambda_handler(self): return getattr(self.lambda_function_module, self.handler_name) -def sentry_lambda_handler(event, context): - # type: (Any, Any) -> None +def sentry_lambda_handler(event: Any, context: Any) -> None: """ Handler function that invokes a lambda handler which path is defined in environment variables as "SENTRY_INITIAL_HANDLER" diff --git a/scripts/populate_tox/README.md b/scripts/populate_tox/README.md index c9a3b67ba0..39bf627ea1 100644 --- a/scripts/populate_tox/README.md +++ b/scripts/populate_tox/README.md @@ -18,6 +18,7 @@ then determining which versions make sense to test to get good coverage. The lowest supported and latest version of a framework are always tested, with a number of releases in between: + - If the package has majors, we pick the highest version of each major. For the latest major, we also pick the lowest version in that major. - If the package doesn't have multiple majors, we pick two versions in between @@ -35,7 +36,8 @@ the main package (framework, library) to test with; any additional test dependencies, optionally gated behind specific conditions; and optionally the Python versions to test on. -Constraints are defined using the format specified below. The following sections describe each key. +Constraints are defined using the format specified below. The following sections +describe each key. ``` integration_name: { @@ -46,6 +48,7 @@ }, "python": python_version_specifier, "include": package_version_specifier, + "test_on_all_python_versions": bool, } ``` @@ -68,11 +71,12 @@ The test dependencies of the test suite. They're defined as a dictionary of in the package list of a rule will be installed as long as the rule applies. `rule`s are predefined. Each `rule` must be one of the following: - - `*`: packages will be always installed - - a version specifier on the main package (e.g. `<=0.32`): packages will only - be installed if the main package falls into the version bounds specified - - specific Python version(s) in the form `py3.8,py3.9`: packages will only be - installed if the Python version matches one from the list + +- `*`: packages will always be installed +- a version specifier on the main package (e.g. 
`<=0.32`): packages will only + be installed if the main package falls into the version bounds specified +- specific Python version(s) in the form `py3.8,py3.9`: packages will only be + installed if the Python version matches one from the list Rules can be used to specify version bounds on older versions of the main package's dependencies, for example. If e.g. Flask tests generally need @@ -101,6 +105,7 @@ Python versions, you can say: ... } ``` + This key is optional. ### `python` @@ -145,7 +150,6 @@ The `include` key can also be used to exclude a set of specific versions by usin `!=` version specifiers. For example, the Starlite restriction above could equivalently be expressed like so: - ```python "starlite": { "include": "!=2.0.0a1,!=2.0.0a2", @@ -153,6 +157,19 @@ be expressed like so: } ``` +### `test_on_all_python_versions` + +By default, the script will cherry-pick a few Python versions to test each +integration on. If you want a test suite to run on all supported Python versions +instead, set `test_on_all_python_versions` to `True`. + +```python +"common": { + # The common test suite should run on all Python versions + "test_on_all_python_versions": True, + ... +} +``` ## How-Tos @@ -176,7 +193,8 @@ A handful of integration test suites are still hardcoded. The goal is to migrate them all to `populate_tox.py` over time. 1. Remove the integration from the `IGNORE` list in `populate_tox.py`. -2. Remove the hardcoded entries for the integration from the `envlist` and `deps` sections of `tox.jinja`. +2. Remove the hardcoded entries for the integration from the `envlist` and `deps` + sections of `tox.jinja`. 3. Run `scripts/generate-test-files.sh`. 4. Run the test suite, either locally or by creating a PR. 5. Address any test failures that happen. @@ -185,6 +203,7 @@ You might have to introduce additional version bounds on the dependencies of the package. Try to determine the source of the failure and address it. Common scenarios: + - An old version of the tested package installs a dependency without defining an upper version bound on it. A new version of the dependency is installed that is incompatible with the package. 
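  A version-gated `deps` rule is the usual way to express the fix; a hypothetical
  sketch (`somepackage` and `somedep` are made-up names, not a real config entry):

  ```python
  "somepackage": {
      "package": "somepackage",
      "deps": {
          # Old somepackage releases don't cap somedep, and newer somedep
          # releases break them, so pin somedep for those releases only.
          "<2.0": ["somedep<3.0"],
      },
  }
  ```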
In this case you need to determine which diff --git a/scripts/populate_tox/config.py b/scripts/populate_tox/config.py index f395289b4a..78bed91475 100644 --- a/scripts/populate_tox/config.py +++ b/scripts/populate_tox/config.py @@ -39,12 +39,24 @@ "package": "celery", "deps": { "*": ["newrelic", "redis"], - "py3.7": ["importlib-metadata<5.0"], }, + "python": ">=3.8", }, "clickhouse_driver": { "package": "clickhouse-driver", }, + "common": { + "package": "opentelemetry-sdk", + "test_on_all_python_versions": True, + "deps": { + "*": ["pytest", "pytest-asyncio"], + # See https://github.com/pytest-dev/pytest/issues/9621 + # and https://github.com/pytest-dev/pytest-forked/issues/67 + # for justification of the upper bound on pytest + "py3.7": ["pytest<7.0.0"], + "py3.8": ["hypothesis"], + }, + }, "cohere": { "package": "cohere", "python": ">=3.9", @@ -53,6 +65,7 @@ "package": "django", "deps": { "*": [ + "channels[daphne]", "psycopg2-binary", "djangorestframework", "pytest-django", @@ -233,7 +246,7 @@ "package": "trytond", "deps": { "*": ["werkzeug"], - "<=5.0": ["werkzeug<1.0"], + "<5.1": ["werkzeug<1.0"], }, }, "typer": { diff --git a/scripts/populate_tox/populate_tox.py b/scripts/populate_tox/populate_tox.py index 3ca5ab18c8..493d29abd0 100644 --- a/scripts/populate_tox/populate_tox.py +++ b/scripts/populate_tox/populate_tox.py @@ -61,7 +61,6 @@ "asgi", "aws_lambda", "cloud_resource_context", - "common", "gevent", "opentelemetry", "potel", @@ -353,22 +352,28 @@ def supported_python_versions( return supported -def pick_python_versions_to_test(python_versions: list[Version]) -> list[Version]: +def pick_python_versions_to_test( + python_versions: list[Version], test_all: bool = False +) -> list[Version]: """ Given a list of Python versions, pick those that make sense to test on. Currently, this is the oldest, the newest, and the second newest Python version. """ - filtered_python_versions = { - python_versions[0], - } + if test_all: + filtered_python_versions = python_versions - filtered_python_versions.add(python_versions[-1]) - try: - filtered_python_versions.add(python_versions[-2]) - except IndexError: - pass + else: + filtered_python_versions = { + python_versions[0], + } + + filtered_python_versions.add(python_versions[-1]) + try: + filtered_python_versions.add(python_versions[-2]) + except IndexError: + pass return sorted(filtered_python_versions) @@ -523,6 +528,9 @@ def _add_python_versions_to_release( time.sleep(PYPI_COOLDOWN) # give PYPI some breathing room + test_on_all_python_versions = ( + TEST_SUITE_CONFIG[integration].get("test_on_all_python_versions") or False + ) target_python_versions = TEST_SUITE_CONFIG[integration].get("python") if target_python_versions: target_python_versions = SpecifierSet(target_python_versions) @@ -531,7 +539,8 @@ def _add_python_versions_to_release( supported_python_versions( determine_python_versions(release_pypi_data), target_python_versions, - ) + ), + test_all=test_on_all_python_versions, ) release.rendered_python_versions = _render_python_versions(release.python_versions) diff --git a/scripts/populate_tox/tox.jinja b/scripts/populate_tox/tox.jinja index c67f4127d5..66b1d7885a 100644 --- a/scripts/populate_tox/tox.jinja +++ b/scripts/populate_tox/tox.jinja @@ -17,11 +17,8 @@ requires = # This version introduced using pip 24.1 which does not work with older Celery and HTTPX versions. 
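    # (virtualenv 20.26.3 appears to be the first release that bundles pip 24.1,
    # hence the exclusive upper bound below.)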
virtualenv<20.26.3 envlist = - # === Common === - {py3.6,py3.7,py3.8,py3.9,py3.10,py3.11,py3.12,py3.13}-common - # === Gevent === - {py3.6,py3.8,py3.10,py3.11,py3.12}-gevent + {py3.8,py3.10,py3.11,py3.12}-gevent # === Integrations === # General format is {pythonversion}-{integrationname}-v{frameworkversion} @@ -55,24 +52,24 @@ envlist = {py3.8,py3.11}-beam-latest # Boto3 - {py3.6,py3.7}-boto3-v{1.12} + {py3.7}-boto3-v{1.12} {py3.7,py3.11,py3.12}-boto3-v{1.23} {py3.11,py3.12}-boto3-v{1.34} {py3.11,py3.12,py3.13}-boto3-latest # Chalice - {py3.6,py3.9}-chalice-v{1.16} + {py3.7,py3.9}-chalice-v{1.16} {py3.8,py3.12,py3.13}-chalice-latest # Cloud Resource Context - {py3.6,py3.12,py3.13}-cloud_resource_context + {py3.7,py3.12,py3.13}-cloud_resource_context # GCP {py3.7}-gcp # HTTPX - {py3.6,py3.9}-httpx-v{0.16,0.18} - {py3.6,py3.10}-httpx-v{0.20,0.22} + {py3.7,py3.9}-httpx-v{0.16,0.18} + {py3.7,py3.10}-httpx-v{0.20,0.22} {py3.7,py3.11,py3.12}-httpx-v{0.23,0.24} {py3.9,py3.11,py3.12}-httpx-v{0.25,0.27} {py3.9,py3.12,py3.13}-httpx-latest @@ -90,14 +87,8 @@ envlist = {py3.9,py3.11,py3.12}-openai-latest {py3.9,py3.11,py3.12}-openai-notiktoken - # OpenTelemetry (OTel) - {py3.7,py3.9,py3.12,py3.13}-opentelemetry - - # OpenTelemetry Experimental (POTel) - {py3.8,py3.9,py3.10,py3.11,py3.12,py3.13}-potel - # pure_eval - {py3.6,py3.12,py3.13}-pure_eval + {py3.7,py3.12,py3.13}-pure_eval # Quart {py3.7,py3.11}-quart-v{0.16} @@ -109,24 +100,22 @@ envlist = {py3.10,py3.11}-ray-latest # Redis - {py3.6,py3.8}-redis-v{3} + {py3.7,py3.8}-redis-v{3} {py3.7,py3.8,py3.11}-redis-v{4} {py3.7,py3.11,py3.12}-redis-v{5} - {py3.7,py3.12,py3.13}-redis-latest # Requests - {py3.6,py3.8,py3.12,py3.13}-requests + {py3.7,py3.8,py3.12,py3.13}-requests # RQ (Redis Queue) - {py3.6}-rq-v{0.6} - {py3.6,py3.9}-rq-v{0.13,1.0} - {py3.6,py3.11}-rq-v{1.5,1.10} + {py3.7,py3.9}-rq-v{0.13,1.0} + {py3.7,py3.11}-rq-v{1.5,1.10} {py3.7,py3.11,py3.12}-rq-v{1.15,1.16} {py3.7,py3.12,py3.13}-rq-latest # Sanic - {py3.6,py3.7}-sanic-v{0.8} - {py3.6,py3.8}-sanic-v{20} + {py3.7}-sanic-v{0.8} + {py3.8}-sanic-v{20} {py3.8,py3.11,py3.12}-sanic-v{24.6} {py3.9,py3.12,py3.13}-sanic-latest @@ -155,22 +144,13 @@ deps = linters: -r requirements-linting.txt linters: werkzeug<2.3.0 - # === Common === - py3.8-common: hypothesis - common: pytest-asyncio - # See https://github.com/pytest-dev/pytest/issues/9621 - # and https://github.com/pytest-dev/pytest-forked/issues/67 - # for justification of the upper bound on pytest - {py3.6,py3.7}-common: pytest<7.0.0 - {py3.8,py3.9,py3.10,py3.11,py3.12,py3.13}-common: pytest - # === Gevent === - {py3.6,py3.7,py3.8,py3.9,py3.10,py3.11}-gevent: gevent>=22.10.0, <22.11.0 + {py3.7,py3.8,py3.9,py3.10,py3.11}-gevent: gevent>=22.10.0, <22.11.0 {py3.12}-gevent: gevent # See https://github.com/pytest-dev/pytest/issues/9621 # and https://github.com/pytest-dev/pytest-forked/issues/67 # for justification of the upper bound on pytest - {py3.6,py3.7}-gevent: pytest<7.0.0 + py3.7-gevent: pytest<7.0.0 {py3.8,py3.9,py3.10,py3.11,py3.12}-gevent: pytest gevent: pytest-asyncio {py3.10,py3.11}-gevent: zope.event<5.0.0 @@ -266,12 +246,6 @@ deps = openai-latest: tiktoken~=0.6.0 openai-notiktoken: openai - # OpenTelemetry (OTel) - opentelemetry: opentelemetry-distro - - # OpenTelemetry Experimental (POTel) - potel: -e .[opentelemetry-experimental] - # pure_eval pure_eval: pure_eval @@ -296,25 +270,22 @@ deps = # Redis redis: fakeredis!=1.7.4 redis: pytest<8.0.0 - {py3.6,py3.7,py3.8}-redis: fakeredis<2.26.0 + {py3.7,py3.8}-redis: fakeredis<2.26.0 
{py3.7,py3.8,py3.9,py3.10,py3.11,py3.12,py3.13}-redis: pytest-asyncio redis-v3: redis~=3.0 redis-v4: redis~=4.0 redis-v5: redis~=5.0 - redis-latest: redis # Requests requests: requests>=2.0 # RQ (Redis Queue) # https://github.com/jamesls/fakeredis/issues/245 - rq-v{0.6}: fakeredis<1.0 - rq-v{0.6}: redis<3.2.2 rq-v{0.13,1.0,1.5,1.10}: fakeredis>=1.0,<1.7.4 rq-v{1.15,1.16}: fakeredis<2.28.0 - {py3.6,py3.7}-rq-v{1.15,1.16}: fakeredis!=2.26.0 # https://github.com/cunla/fakeredis-py/issues/341 + py3.7-rq-v{1.15,1.16}: fakeredis!=2.26.0 # https://github.com/cunla/fakeredis-py/issues/341 rq-latest: fakeredis<2.28.0 - {py3.6,py3.7}-rq-latest: fakeredis!=2.26.0 # https://github.com/cunla/fakeredis-py/issues/341 + py3.7-rq-latest: fakeredis!=2.26.0 # https://github.com/cunla/fakeredis-py/issues/341 rq-v0.6: rq~=0.6.0 rq-v0.13: rq~=0.13.0 rq-v1.0: rq~=1.0.0 @@ -362,9 +333,9 @@ setenv = PYTHONDONTWRITEBYTECODE=1 OBJC_DISABLE_INITIALIZE_FORK_SAFETY=YES COVERAGE_FILE=.coverage-sentry-{envname} - py3.6: COVERAGE_RCFILE=.coveragerc36 django: DJANGO_SETTINGS_MODULE=tests.integrations.django.myapp.settings + py3.12-django: PIP_CONSTRAINT=constraints.txt spark-v{3.0.3,3.5.6}: JAVA_HOME=/usr/lib/jvm/temurin-11-jdk-amd64 common: TESTPATH=tests @@ -403,8 +374,6 @@ setenv = openai: TESTPATH=tests/integrations/openai openai_agents: TESTPATH=tests/integrations/openai_agents openfeature: TESTPATH=tests/integrations/openfeature - opentelemetry: TESTPATH=tests/integrations/opentelemetry - potel: TESTPATH=tests/integrations/opentelemetry pure_eval: TESTPATH=tests/integrations/pure_eval pymongo: TESTPATH=tests/integrations/pymongo pyramid: TESTPATH=tests/integrations/pyramid @@ -442,7 +411,6 @@ extras = pymongo: pymongo basepython = - py3.6: python3.6 py3.7: python3.7 py3.8: python3.8 py3.9: python3.9 diff --git a/scripts/split_tox_gh_actions/templates/test_group.jinja b/scripts/split_tox_gh_actions/templates/test_group.jinja index 96faefc54e..e78e0ece67 100644 --- a/scripts/split_tox_gh_actions/templates/test_group.jinja +++ b/scripts/split_tox_gh_actions/templates/test_group.jinja @@ -6,10 +6,6 @@ fail-fast: false matrix: python-version: [{{ py_versions.get(category)|join(",") }}] - # python3.6 reached EOL and is no longer being supported on - # new versions of hosted runners on Github Actions - # ubuntu-20.04 is the last version that supported python3.6 - # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877 os: [ubuntu-22.04] {% if needs_docker %} @@ -34,17 +30,14 @@ ports: - 5432:5432 env: - SENTRY_PYTHON_TEST_POSTGRES_HOST: {% raw %}${{ matrix.python-version == '3.6' && 'postgres' || 'localhost' }}{% endraw %} + SENTRY_PYTHON_TEST_POSTGRES_HOST: localhost SENTRY_PYTHON_TEST_POSTGRES_USER: postgres SENTRY_PYTHON_TEST_POSTGRES_PASSWORD: sentry {% endif %} - # Use Docker container only for Python 3.6 - {% raw %}container: ${{ matrix.python-version == '3.6' && 'python:3.6' || null }}{% endraw %} steps: - uses: actions/checkout@v4.2.2 - uses: actions/setup-python@v5 - {% raw %}if: ${{ matrix.python-version != '3.6' }}{% endraw %} with: python-version: {% raw %}${{ matrix.python-version }}{% endraw %} allow-prereleases: true @@ -85,15 +78,8 @@ {% endif %} {% endfor %} - - name: Generate coverage XML (Python 3.6) - if: {% raw %}${{ !cancelled() && matrix.python-version == '3.6' }}{% endraw %} - run: | - export COVERAGE_RCFILE=.coveragerc36 - coverage combine .coverage-sentry-* - coverage xml --ignore-errors - - name: Generate coverage XML - if: {% raw %}${{ !cancelled() && 
matrix.python-version != '3.6' }}{% endraw %} + if: {% raw %}${{ !cancelled() }}{% endraw %} run: | coverage combine .coverage-sentry-* coverage xml diff --git a/sentry_sdk/__init__.py b/sentry_sdk/__init__.py index a37b52ff4e..51916580db 100644 --- a/sentry_sdk/__init__.py +++ b/sentry_sdk/__init__.py @@ -1,4 +1,6 @@ -from sentry_sdk.scope import Scope +# TODO-neel scope switch +# TODO-neel avoid duplication between api and __init__ +from sentry_sdk.opentelemetry.scope import PotelScope as Scope from sentry_sdk.transport import Transport, HttpTransport from sentry_sdk.client import Client @@ -7,7 +9,6 @@ from sentry_sdk.consts import VERSION # noqa __all__ = [ # noqa - "Hub", "Scope", "Client", "Transport", @@ -20,8 +21,8 @@ "capture_event", "capture_exception", "capture_message", - "configure_scope", "continue_trace", + "new_trace", "flush", "get_baggage", "get_client", @@ -34,11 +35,9 @@ "isolation_scope", "last_event_id", "new_scope", - "push_scope", "set_context", "set_extra", "set_level", - "set_measurement", "set_tag", "set_tags", "set_user", @@ -58,6 +57,3 @@ init_debug_support() del init_debug_support - -# circular imports -from sentry_sdk.hub import Hub diff --git a/sentry_sdk/_compat.py b/sentry_sdk/_compat.py index a811cf2120..8fc7f16ee7 100644 --- a/sentry_sdk/_compat.py +++ b/sentry_sdk/_compat.py @@ -1,32 +1,25 @@ +from __future__ import annotations import sys - from typing import TYPE_CHECKING if TYPE_CHECKING: from typing import Any - from typing import TypeVar - - T = TypeVar("T") -PY37 = sys.version_info[0] == 3 and sys.version_info[1] >= 7 PY38 = sys.version_info[0] == 3 and sys.version_info[1] >= 8 PY310 = sys.version_info[0] == 3 and sys.version_info[1] >= 10 PY311 = sys.version_info[0] == 3 and sys.version_info[1] >= 11 -def with_metaclass(meta, *bases): - # type: (Any, *Any) -> Any +def with_metaclass(meta: Any, *bases: Any) -> Any: class MetaClass(type): - def __new__(metacls, name, this_bases, d): - # type: (Any, Any, Any, Any) -> Any + def __new__(metacls: Any, name: Any, this_bases: Any, d: Any) -> Any: return meta(name, bases, d) return type.__new__(MetaClass, "temporary_class", (), {}) -def check_uwsgi_thread_support(): - # type: () -> bool +def check_uwsgi_thread_support() -> bool: # We check two things here: # # 1. uWSGI doesn't run in threaded mode by default -- issue a warning if @@ -46,8 +39,7 @@ def check_uwsgi_thread_support(): from sentry_sdk.consts import FALSE_VALUES - def enabled(option): - # type: (str) -> bool + def enabled(option: str) -> bool: value = opt.get(option, False) if isinstance(value, bool): return value diff --git a/sentry_sdk/_init_implementation.py b/sentry_sdk/_init_implementation.py index eb02b3d11e..06e7f28d4f 100644 --- a/sentry_sdk/_init_implementation.py +++ b/sentry_sdk/_init_implementation.py @@ -1,69 +1,31 @@ -import warnings +from __future__ import annotations from typing import TYPE_CHECKING -import sentry_sdk - if TYPE_CHECKING: - from typing import Any, ContextManager, Optional - - import sentry_sdk.consts - - -class _InitGuard: - _CONTEXT_MANAGER_DEPRECATION_WARNING_MESSAGE = ( - "Using the return value of sentry_sdk.init as a context manager " - "and manually calling the __enter__ and __exit__ methods on the " - "return value are deprecated. We are no longer maintaining this " - "functionality, and we will remove it in the next major release." 
- ) + from typing import Optional, Any - def __init__(self, client): - # type: (sentry_sdk.Client) -> None - self._client = client - - def __enter__(self): - # type: () -> _InitGuard - warnings.warn( - self._CONTEXT_MANAGER_DEPRECATION_WARNING_MESSAGE, - stacklevel=2, - category=DeprecationWarning, - ) - - return self - - def __exit__(self, exc_type, exc_value, tb): - # type: (Any, Any, Any) -> None - warnings.warn( - self._CONTEXT_MANAGER_DEPRECATION_WARNING_MESSAGE, - stacklevel=2, - category=DeprecationWarning, - ) - - c = self._client - if c is not None: - c.close() +import sentry_sdk +from sentry_sdk.consts import ClientConstructor +from sentry_sdk.opentelemetry.scope import setup_scope_context_management -def _check_python_deprecations(): - # type: () -> None +def _check_python_deprecations() -> None: # Since we're likely to deprecate Python versions in the future, I'm keeping # this handy function around. Use this to detect the Python version used and # to output logger.warning()s if it's deprecated. pass -def _init(*args, **kwargs): - # type: (*Optional[str], **Any) -> ContextManager[Any] +def _init(*args: Optional[str], **kwargs: Any) -> None: """Initializes the SDK and optionally integrations. This takes the same arguments as the client constructor. """ + setup_scope_context_management() client = sentry_sdk.Client(*args, **kwargs) sentry_sdk.get_global_scope().set_client(client) _check_python_deprecations() - rv = _InitGuard(client) - return rv if TYPE_CHECKING: @@ -73,7 +35,7 @@ def _init(*args, **kwargs): # Use `ClientConstructor` to define the argument types of `init` and # `ContextManager[Any]` to tell static analyzers about the return type. - class init(sentry_sdk.consts.ClientConstructor, _InitGuard): # noqa: N801 + class init(ClientConstructor): # noqa: N801 pass else: diff --git a/sentry_sdk/_log_batcher.py b/sentry_sdk/_log_batcher.py index 87bebdb226..5486e91b26 100644 --- a/sentry_sdk/_log_batcher.py +++ b/sentry_sdk/_log_batcher.py @@ -1,37 +1,35 @@ +from __future__ import annotations import os import random import threading from datetime import datetime, timezone -from typing import Optional, List, Callable, TYPE_CHECKING, Any from sentry_sdk.utils import format_timestamp, safe_repr from sentry_sdk.envelope import Envelope, Item, PayloadRef +from typing import TYPE_CHECKING + if TYPE_CHECKING: from sentry_sdk._types import Log + from typing import Optional, List, Callable, Any class LogBatcher: MAX_LOGS_BEFORE_FLUSH = 100 FLUSH_WAIT_TIME = 5.0 - def __init__( - self, - capture_func, # type: Callable[[Envelope], None] - ): - # type: (...) -> None - self._log_buffer = [] # type: List[Log] + def __init__(self, capture_func: Callable[[Envelope], None]) -> None: + self._log_buffer: List[Log] = [] self._capture_func = capture_func self._running = True self._lock = threading.Lock() - self._flush_event = threading.Event() # type: threading.Event + self._flush_event = threading.Event() - self._flusher = None # type: Optional[threading.Thread] - self._flusher_pid = None # type: Optional[int] + self._flusher: Optional[threading.Thread] = None + self._flusher_pid: Optional[int] = None - def _ensure_thread(self): - # type: (...) -> bool + def _ensure_thread(self) -> bool: """For forking processes we might need to restart this thread. This ensures that our process actually has that thread running. """ @@ -63,18 +61,13 @@ def _ensure_thread(self): return True - def _flush_loop(self): - # type: (...) 
-> None + def _flush_loop(self) -> None: while self._running: self._flush_event.wait(self.FLUSH_WAIT_TIME + random.random()) self._flush_event.clear() self._flush() - def add( - self, - log, # type: Log - ): - # type: (...) -> None + def add(self, log: Log) -> None: if not self._ensure_thread() or self._flusher is None: return None @@ -83,8 +76,7 @@ def add( if len(self._log_buffer) >= self.MAX_LOGS_BEFORE_FLUSH: self._flush_event.set() - def kill(self): - # type: (...) -> None + def kill(self) -> None: if self._flusher is None: return @@ -92,15 +84,12 @@ def kill(self): self._flush_event.set() self._flusher = None - def flush(self): - # type: (...) -> None + def flush(self) -> None: self._flush() @staticmethod - def _log_to_transport_format(log): - # type: (Log) -> Any - def format_attribute(val): - # type: (int | float | str | bool) -> Any + def _log_to_transport_format(log: Log) -> Any: + def format_attribute(val: int | float | str | bool) -> Any: if isinstance(val, bool): return {"value": val, "type": "boolean"} if isinstance(val, int): @@ -128,8 +117,7 @@ def format_attribute(val): return res - def _flush(self): - # type: (...) -> Optional[Envelope] + def _flush(self) -> Optional[Envelope]: envelope = Envelope( headers={"sent_at": format_timestamp(datetime.now(timezone.utc))} diff --git a/sentry_sdk/_lru_cache.py b/sentry_sdk/_lru_cache.py index cbadd9723b..aec8883546 100644 --- a/sentry_sdk/_lru_cache.py +++ b/sentry_sdk/_lru_cache.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from typing import TYPE_CHECKING if TYPE_CHECKING: @@ -8,17 +10,15 @@ class LRUCache: - def __init__(self, max_size): - # type: (int) -> None + def __init__(self, max_size: int) -> None: if max_size <= 0: raise AssertionError(f"invalid max_size: {max_size}") self.max_size = max_size - self._data = {} # type: dict[Any, Any] + self._data: dict[Any, Any] = {} self.hits = self.misses = 0 self.full = False - def set(self, key, value): - # type: (Any, Any) -> None + def set(self, key: Any, value: Any) -> None: current = self._data.pop(key, _SENTINEL) if current is not _SENTINEL: self._data[key] = value @@ -29,8 +29,7 @@ def set(self, key, value): self._data[key] = value self.full = len(self._data) >= self.max_size - def get(self, key, default=None): - # type: (Any, Any) -> Any + def get(self, key: Any, default: Any = None) -> Any: try: ret = self._data.pop(key) except KeyError: @@ -42,6 +41,5 @@ def get(self, key, default=None): return ret - def get_all(self): - # type: () -> list[tuple[Any, Any]] + def get_all(self) -> list[tuple[Any, Any]]: return list(self._data.items()) diff --git a/sentry_sdk/_queue.py b/sentry_sdk/_queue.py index a21c86ec0a..59fac1786e 100644 --- a/sentry_sdk/_queue.py +++ b/sentry_sdk/_queue.py @@ -10,9 +10,6 @@ https://bugs.python.org/issue14976 https://github.com/sqlalchemy/sqlalchemy/blob/4eb747b61f0c1b1c25bdee3856d7195d10a0c227/lib/sqlalchemy/queue.py#L1 -We also vendor the code to evade eventlet's broken monkeypatching, see -https://github.com/getsentry/sentry-python/pull/484 - Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011, 2012, 2013, 2014, 2015, 2016, 2017, 2018, 2019, 2020 Python Software Foundation; @@ -81,6 +78,7 @@ if TYPE_CHECKING: from typing import Any + __all__ = ["EmptyError", "FullError", "Queue"] @@ -275,7 +273,7 @@ def get_nowait(self): # Initialize the queue representation def _init(self, maxsize): - self.queue = deque() # type: Any + self.queue: Any = deque() def _qsize(self): return len(self.queue) diff --git 
a/sentry_sdk/_types.py b/sentry_sdk/_types.py index 8336617a8d..9625859307 100644 --- a/sentry_sdk/_types.py +++ b/sentry_sdk/_types.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from typing import TYPE_CHECKING, TypeVar, Union @@ -18,32 +20,27 @@ class AnnotatedValue: __slots__ = ("value", "metadata") - def __init__(self, value, metadata): - # type: (Optional[Any], Dict[str, Any]) -> None + def __init__(self, value: Optional[Any], metadata: Dict[str, Any]) -> None: self.value = value self.metadata = metadata - def __eq__(self, other): - # type: (Any) -> bool + def __eq__(self, other: Any) -> bool: if not isinstance(other, AnnotatedValue): return False return self.value == other.value and self.metadata == other.metadata - def __str__(self): - # type: (AnnotatedValue) -> str + def __str__(self) -> str: return str({"value": str(self.value), "metadata": str(self.metadata)}) - def __len__(self): - # type: (AnnotatedValue) -> int + def __len__(self) -> int: if self.value is not None: return len(self.value) else: return 0 @classmethod - def removed_because_raw_data(cls): - # type: () -> AnnotatedValue + def removed_because_raw_data(cls) -> AnnotatedValue: """The value was removed because it could not be parsed. This is done for request body values that are not json nor a form.""" return AnnotatedValue( value="", @@ -58,8 +55,7 @@ def removed_because_raw_data(cls): ) @classmethod - def removed_because_over_size_limit(cls, value=""): - # type: (Any) -> AnnotatedValue + def removed_because_over_size_limit(cls, value: Any = "") -> AnnotatedValue: """ The actual value was removed because the size of the field exceeded the configured maximum size, for example specified with the max_request_body_size sdk option. @@ -77,8 +73,7 @@ def removed_because_over_size_limit(cls, value=""): ) @classmethod - def substituted_because_contains_sensitive_data(cls): - # type: () -> AnnotatedValue + def substituted_because_contains_sensitive_data(cls) -> AnnotatedValue: """The actual value was removed because it contained sensitive information.""" return AnnotatedValue( value=SENSITIVE_DATA_SUBSTITUTE, @@ -107,9 +102,7 @@ def substituted_because_contains_sensitive_data(cls): from typing import Callable from typing import Dict from typing import Mapping - from typing import NotRequired from typing import Optional - from typing import Tuple from typing import Type from typing_extensions import Literal, TypedDict @@ -121,45 +114,6 @@ class SDKInfo(TypedDict): # "critical" is an alias of "fatal" recognized by Relay LogLevelStr = Literal["fatal", "critical", "error", "warning", "info", "debug"] - DurationUnit = Literal[ - "nanosecond", - "microsecond", - "millisecond", - "second", - "minute", - "hour", - "day", - "week", - ] - - InformationUnit = Literal[ - "bit", - "byte", - "kilobyte", - "kibibyte", - "megabyte", - "mebibyte", - "gigabyte", - "gibibyte", - "terabyte", - "tebibyte", - "petabyte", - "pebibyte", - "exabyte", - "exbibyte", - ] - - FractionUnit = Literal["ratio", "percent"] - MeasurementUnit = Union[DurationUnit, InformationUnit, FractionUnit, str] - - MeasurementValue = TypedDict( - "MeasurementValue", - { - "value": float, - "unit": NotRequired[Optional[MeasurementUnit]], - }, - ) - Event = TypedDict( "Event", { @@ -181,7 +135,6 @@ class SDKInfo(TypedDict): "level": LogLevelStr, "logentry": Mapping[str, object], "logger": str, - "measurements": dict[str, MeasurementValue], "message": str, "modules": dict[str, str], "monitor_config": Mapping[str, object], @@ -210,7 +163,6 @@ class 
SDKInfo(TypedDict): "type": Literal["check_in", "transaction"], "user": dict[str, object], "_dropped_spans": int, - "_metrics_summary": dict[str, object], }, total=False, ) @@ -266,7 +218,6 @@ class SDKInfo(TypedDict): "internal", "profile", "profile_chunk", - "metric_bucket", "monitor", "span", "log", @@ -276,26 +227,6 @@ class SDKInfo(TypedDict): ContinuousProfilerMode = Literal["thread", "gevent", "unknown"] ProfilerMode = Union[ContinuousProfilerMode, Literal["sleep"]] - # Type of the metric. - MetricType = Literal["d", "s", "g", "c"] - - # Value of the metric. - MetricValue = Union[int, float, str] - - # Internal representation of tags as a tuple of tuples (this is done in order to allow for the same key to exist - # multiple times). - MetricTagsInternal = Tuple[Tuple[str, str], ...] - - # External representation of tags as a dictionary. - MetricTagValue = Union[str, int, float, None] - MetricTags = Mapping[str, MetricTagValue] - - # Value inside the generator for the metric value. - FlushedMetricValue = Union[int, float] - - BucketKey = Tuple[MetricType, str, MeasurementUnit, MetricTagsInternal] - MetricMetaKey = Tuple[MetricType, str, MeasurementUnit] - MonitorConfigScheduleType = Literal["crontab", "interval"] MonitorConfigScheduleUnit = Literal[ "year", diff --git a/sentry_sdk/_werkzeug.py b/sentry_sdk/_werkzeug.py index 0fa3d611f1..8886d5cffa 100644 --- a/sentry_sdk/_werkzeug.py +++ b/sentry_sdk/_werkzeug.py @@ -32,12 +32,12 @@ SUCH DAMAGE. """ +from __future__ import annotations + from typing import TYPE_CHECKING if TYPE_CHECKING: - from typing import Dict - from typing import Iterator - from typing import Tuple + from typing import Dict, Iterator, Tuple # @@ -47,8 +47,7 @@ # We need this function because Django does not give us a "pure" http header # dict. So we might as well use it for all WSGI integrations. # -def _get_headers(environ): - # type: (Dict[str, str]) -> Iterator[Tuple[str, str]] +def _get_headers(environ: Dict[str, str]) -> Iterator[Tuple[str, str]]: """ Returns only proper HTTP headers. """ @@ -67,8 +66,7 @@ def _get_headers(environ): # `get_host` comes from `werkzeug.wsgi.get_host` # https://github.com/pallets/werkzeug/blob/1.0.1/src/werkzeug/wsgi.py#L145 # -def get_host(environ, use_x_forwarded_for=False): - # type: (Dict[str, str], bool) -> str +def get_host(environ: Dict[str, str], use_x_forwarded_for: bool = False) -> str: """ Return the host for the given WSGI environment. 
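    A minimal usage sketch (these environ values are hypothetical):

        >>> environ = {"HTTP_HOST": "example.com", "wsgi.url_scheme": "https"}
        >>> get_host(environ)
        'example.com'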
""" diff --git a/sentry_sdk/ai/monitoring.py b/sentry_sdk/ai/monitoring.py index e3f372c3ba..33c07f7083 100644 --- a/sentry_sdk/ai/monitoring.py +++ b/sentry_sdk/ai/monitoring.py @@ -1,3 +1,4 @@ +from __future__ import annotations import inspect from functools import wraps @@ -15,32 +16,29 @@ _ai_pipeline_name = ContextVar("ai_pipeline_name", default=None) -def set_ai_pipeline_name(name): - # type: (Optional[str]) -> None +def set_ai_pipeline_name(name: Optional[str]) -> None: _ai_pipeline_name.set(name) -def get_ai_pipeline_name(): - # type: () -> Optional[str] +def get_ai_pipeline_name() -> Optional[str]: return _ai_pipeline_name.get() -def ai_track(description, **span_kwargs): - # type: (str, Any) -> Callable[..., Any] - def decorator(f): - # type: (Callable[..., Any]) -> Callable[..., Any] - def sync_wrapped(*args, **kwargs): - # type: (Any, Any) -> Any +def ai_track(description: str, **span_kwargs: Any) -> Callable[..., Any]: + def decorator(f: Callable[..., Any]) -> Callable[..., Any]: + def sync_wrapped(*args: Any, **kwargs: Any) -> Any: curr_pipeline = _ai_pipeline_name.get() op = span_kwargs.pop("op", "ai.run" if curr_pipeline else "ai.pipeline") - with start_span(name=description, op=op, **span_kwargs) as span: + with start_span( + name=description, op=op, only_as_child_span=True, **span_kwargs + ) as span: for k, v in kwargs.pop("sentry_tags", {}).items(): span.set_tag(k, v) for k, v in kwargs.pop("sentry_data", {}).items(): - span.set_data(k, v) + span.set_attribute(k, v) if curr_pipeline: - span.set_data(SPANDATA.GEN_AI_PIPELINE_NAME, curr_pipeline) + span.set_attribute(SPANDATA.GEN_AI_PIPELINE_NAME, curr_pipeline) return f(*args, **kwargs) else: _ai_pipeline_name.set(description) @@ -58,18 +56,19 @@ def sync_wrapped(*args, **kwargs): _ai_pipeline_name.set(None) return res - async def async_wrapped(*args, **kwargs): - # type: (Any, Any) -> Any + async def async_wrapped(*args: Any, **kwargs: Any) -> Any: curr_pipeline = _ai_pipeline_name.get() op = span_kwargs.pop("op", "ai.run" if curr_pipeline else "ai.pipeline") - with start_span(name=description, op=op, **span_kwargs) as span: + with start_span( + name=description, op=op, only_as_child_span=True, **span_kwargs + ) as span: for k, v in kwargs.pop("sentry_tags", {}).items(): span.set_tag(k, v) for k, v in kwargs.pop("sentry_data", {}).items(): - span.set_data(k, v) + span.set_attribute(k, v) if curr_pipeline: - span.set_data(SPANDATA.GEN_AI_PIPELINE_NAME, curr_pipeline) + span.set_attribute(SPANDATA.GEN_AI_PIPELINE_NAME, curr_pipeline) return await f(*args, **kwargs) else: _ai_pipeline_name.set(description) @@ -96,34 +95,32 @@ async def async_wrapped(*args, **kwargs): def record_token_usage( - span, - input_tokens=None, - input_tokens_cached=None, - output_tokens=None, - output_tokens_reasoning=None, - total_tokens=None, -): - # type: (Span, Optional[int], Optional[int], Optional[int], Optional[int], Optional[int]) -> None - + span: Span, + input_tokens: Optional[int] = None, + input_tokens_cached: Optional[int] = None, + output_tokens: Optional[int] = None, + output_tokens_reasoning: Optional[int] = None, + total_tokens: Optional[int] = None, +) -> None: # TODO: move pipeline name elsewhere ai_pipeline_name = get_ai_pipeline_name() if ai_pipeline_name: - span.set_data(SPANDATA.GEN_AI_PIPELINE_NAME, ai_pipeline_name) + span.set_attribute(SPANDATA.GEN_AI_PIPELINE_NAME, ai_pipeline_name) if input_tokens is not None: - span.set_data(SPANDATA.GEN_AI_USAGE_INPUT_TOKENS, input_tokens) + 
span.set_attribute(SPANDATA.GEN_AI_USAGE_INPUT_TOKENS, input_tokens) if input_tokens_cached is not None: - span.set_data( + span.set_attribute( SPANDATA.GEN_AI_USAGE_INPUT_TOKENS_CACHED, input_tokens_cached, ) if output_tokens is not None: - span.set_data(SPANDATA.GEN_AI_USAGE_OUTPUT_TOKENS, output_tokens) + span.set_attribute(SPANDATA.GEN_AI_USAGE_OUTPUT_TOKENS, output_tokens) if output_tokens_reasoning is not None: - span.set_data( + span.set_attribute( SPANDATA.GEN_AI_USAGE_OUTPUT_TOKENS_REASONING, output_tokens_reasoning, ) @@ -132,4 +129,4 @@ def record_token_usage( total_tokens = input_tokens + output_tokens if total_tokens is not None: - span.set_data(SPANDATA.GEN_AI_USAGE_TOTAL_TOKENS, total_tokens) + span.set_attribute(SPANDATA.GEN_AI_USAGE_TOTAL_TOKENS, total_tokens) diff --git a/sentry_sdk/ai/utils.py b/sentry_sdk/ai/utils.py index a3c62600c0..103ef03a23 100644 --- a/sentry_sdk/ai/utils.py +++ b/sentry_sdk/ai/utils.py @@ -1,3 +1,4 @@ +from __future__ import annotations from typing import TYPE_CHECKING if TYPE_CHECKING: @@ -7,8 +8,7 @@ from sentry_sdk.utils import logger -def _normalize_data(data): - # type: (Any) -> Any +def _normalize_data(data: Any) -> Any: # convert pydantic data (e.g. OpenAI v1+) to json compatible format if hasattr(data, "model_dump"): @@ -27,10 +27,9 @@ def _normalize_data(data): return data -def set_data_normalized(span, key, value): - # type: (Span, str, Any) -> None +def set_data_normalized(span: Span, key: str, value: Any) -> None: normalized = _normalize_data(value) if isinstance(normalized, (int, float, bool, str)): - span.set_data(key, normalized) + span.set_attribute(key, normalized) else: - span.set_data(key, str(normalized)) + span.set_attribute(key, str(normalized)) diff --git a/sentry_sdk/api.py b/sentry_sdk/api.py index 43758b4d78..ea6a678ee1 100644 --- a/sentry_sdk/api.py +++ b/sentry_sdk/api.py @@ -1,51 +1,42 @@ +from __future__ import annotations import inspect -import warnings from contextlib import contextmanager from sentry_sdk import tracing_utils, Client from sentry_sdk._init_implementation import init -from sentry_sdk.consts import INSTRUMENTER -from sentry_sdk.scope import Scope, _ScopeManager, new_scope, isolation_scope -from sentry_sdk.tracing import NoOpSpan, Transaction, trace +from sentry_sdk.tracing import trace from sentry_sdk.crons import monitor +# TODO-neel-potel make 2 scope strategies/impls and switch +from sentry_sdk.scope import Scope as BaseScope +from sentry_sdk.opentelemetry.scope import ( + PotelScope as Scope, + new_scope, + isolation_scope, + use_scope, + use_isolation_scope, +) + + from typing import TYPE_CHECKING if TYPE_CHECKING: - from collections.abc import Mapping + from typing import Any, Optional, Callable, TypeVar, Union, Generator - from typing import Any - from typing import Dict - from typing import Generator - from typing import Optional - from typing import overload - from typing import Callable - from typing import TypeVar - from typing import ContextManager - from typing import Union - - from typing_extensions import Unpack + T = TypeVar("T") + F = TypeVar("F", bound=Callable[..., Any]) + from collections.abc import Mapping from sentry_sdk.client import BaseClient + from sentry_sdk.tracing import Span from sentry_sdk._types import ( Event, Hint, - Breadcrumb, - BreadcrumbHint, - ExcInfo, - MeasurementUnit, LogLevelStr, - SamplingContext, + ExcInfo, + BreadcrumbHint, + Breadcrumb, ) - from sentry_sdk.tracing import Span, TransactionKwargs - - T = TypeVar("T") - F = TypeVar("F", bound=Callable[..., 
Any]) -else: - - def overload(x): - # type: (T) -> T - return x # When changing this, update __all__ in __init__.py too @@ -56,8 +47,8 @@ def overload(x): "capture_event", "capture_exception", "capture_message", - "configure_scope", "continue_trace", + "new_trace", "flush", "get_baggage", "get_client", @@ -70,11 +61,9 @@ def overload(x): "isolation_scope", "last_event_id", "new_scope", - "push_scope", "set_context", "set_extra", "set_level", - "set_measurement", "set_tag", "set_tags", "set_user", @@ -82,6 +71,8 @@ def overload(x): "start_transaction", "trace", "monitor", + "use_scope", + "use_isolation_scope", "start_session", "end_session", "set_transaction_name", @@ -89,8 +80,7 @@ def overload(x): ] -def scopemethod(f): - # type: (F) -> F +def scopemethod(f: F) -> F: f.__doc__ = "%s\n\n%s" % ( "Alias for :py:meth:`sentry_sdk.Scope.%s`" % f.__name__, inspect.getdoc(getattr(Scope, f.__name__)), @@ -98,8 +88,7 @@ def scopemethod(f): return f -def clientmethod(f): - # type: (F) -> F +def clientmethod(f: F) -> F: f.__doc__ = "%s\n\n%s" % ( "Alias for :py:meth:`sentry_sdk.Client.%s`" % f.__name__, inspect.getdoc(getattr(Client, f.__name__)), @@ -108,13 +97,11 @@ def clientmethod(f): @scopemethod -def get_client(): - # type: () -> BaseClient +def get_client() -> BaseClient: return Scope.get_client() -def is_initialized(): - # type: () -> bool +def is_initialized() -> bool: """ .. versionadded:: 2.0.0 @@ -128,26 +115,22 @@ def is_initialized(): @scopemethod -def get_global_scope(): - # type: () -> Scope +def get_global_scope() -> BaseScope: return Scope.get_global_scope() @scopemethod -def get_isolation_scope(): - # type: () -> Scope +def get_isolation_scope() -> Scope: return Scope.get_isolation_scope() @scopemethod -def get_current_scope(): - # type: () -> Scope +def get_current_scope() -> Scope: return Scope.get_current_scope() @scopemethod -def last_event_id(): - # type: () -> Optional[str] +def last_event_id() -> Optional[str]: """ See :py:meth:`sentry_sdk.Scope.last_event_id` documentation regarding this method's limitations. @@ -157,23 +140,21 @@ def last_event_id(): @scopemethod def capture_event( - event, # type: Event - hint=None, # type: Optional[Hint] - scope=None, # type: Optional[Any] - **scope_kwargs, # type: Any -): - # type: (...) -> Optional[str] + event: Event, + hint: Optional[Hint] = None, + scope: Optional[Any] = None, + **scope_kwargs: Any, +) -> Optional[str]: return get_current_scope().capture_event(event, hint, scope=scope, **scope_kwargs) @scopemethod def capture_message( - message, # type: str - level=None, # type: Optional[LogLevelStr] - scope=None, # type: Optional[Any] - **scope_kwargs, # type: Any -): - # type: (...) -> Optional[str] + message: str, + level: Optional[LogLevelStr] = None, + scope: Optional[Any] = None, + **scope_kwargs: Any, +) -> Optional[str]: return get_current_scope().capture_message( message, level, scope=scope, **scope_kwargs ) @@ -181,23 +162,21 @@ def capture_message( @scopemethod def capture_exception( - error=None, # type: Optional[Union[BaseException, ExcInfo]] - scope=None, # type: Optional[Any] - **scope_kwargs, # type: Any -): - # type: (...) 
-> Optional[str] + error: Optional[Union[BaseException, ExcInfo]] = None, + scope: Optional[Any] = None, + **scope_kwargs: Any, +) -> Optional[str]: return get_current_scope().capture_exception(error, scope=scope, **scope_kwargs) @scopemethod def add_attachment( - bytes=None, # type: Union[None, bytes, Callable[[], bytes]] - filename=None, # type: Optional[str] - path=None, # type: Optional[str] - content_type=None, # type: Optional[str] - add_to_transactions=False, # type: bool -): - # type: (...) -> None + bytes: Union[None, bytes, Callable[[], bytes]] = None, + filename: Optional[str] = None, + path: Optional[str] = None, + content_type: Optional[str] = None, + add_to_transactions: bool = False, +) -> None: return get_isolation_scope().add_attachment( bytes, filename, path, content_type, add_to_transactions ) @@ -205,171 +184,73 @@ def add_attachment( @scopemethod def add_breadcrumb( - crumb=None, # type: Optional[Breadcrumb] - hint=None, # type: Optional[BreadcrumbHint] - **kwargs, # type: Any -): - # type: (...) -> None + crumb: Optional[Breadcrumb] = None, + hint: Optional[BreadcrumbHint] = None, + **kwargs: Any, +) -> None: return get_isolation_scope().add_breadcrumb(crumb, hint, **kwargs) -@overload -def configure_scope(): - # type: () -> ContextManager[Scope] - pass - - -@overload -def configure_scope( # noqa: F811 - callback, # type: Callable[[Scope], None] -): - # type: (...) -> None - pass - - -def configure_scope( # noqa: F811 - callback=None, # type: Optional[Callable[[Scope], None]] -): - # type: (...) -> Optional[ContextManager[Scope]] - """ - Reconfigures the scope. - - :param callback: If provided, call the callback with the current scope. - - :returns: If no callback is provided, returns a context manager that returns the scope. - """ - warnings.warn( - "sentry_sdk.configure_scope is deprecated and will be removed in the next major version. " - "Please consult our migration guide to learn how to migrate to the new API: " - "https://docs.sentry.io/platforms/python/migration/1.x-to-2.x#scope-configuring", - DeprecationWarning, - stacklevel=2, - ) - - scope = get_isolation_scope() - scope.generate_propagation_context() - - if callback is not None: - # TODO: used to return None when client is None. Check if this changes behavior. - callback(scope) - - return None - - @contextmanager - def inner(): - # type: () -> Generator[Scope, None, None] - yield scope - - return inner() - - -@overload -def push_scope(): - # type: () -> ContextManager[Scope] - pass - - -@overload -def push_scope( # noqa: F811 - callback, # type: Callable[[Scope], None] -): - # type: (...) -> None - pass - - -def push_scope( # noqa: F811 - callback=None, # type: Optional[Callable[[Scope], None]] -): - # type: (...) -> Optional[ContextManager[Scope]] - """ - Pushes a new layer on the scope stack. - - :param callback: If provided, this method pushes a scope, calls - `callback`, and pops the scope again. - - :returns: If no `callback` is provided, a context manager that should - be used to pop the scope again. - """ - warnings.warn( - "sentry_sdk.push_scope is deprecated and will be removed in the next major version. 
" - "Please consult our migration guide to learn how to migrate to the new API: " - "https://docs.sentry.io/platforms/python/migration/1.x-to-2.x#scope-pushing", - DeprecationWarning, - stacklevel=2, - ) - - if callback is not None: - with warnings.catch_warnings(): - warnings.simplefilter("ignore", DeprecationWarning) - with push_scope() as scope: - callback(scope) - return None - - return _ScopeManager() - - @scopemethod -def set_tag(key, value): - # type: (str, Any) -> None +def set_tag(key: str, value: Any) -> None: return get_isolation_scope().set_tag(key, value) @scopemethod -def set_tags(tags): - # type: (Mapping[str, object]) -> None +def set_tags(tags: Mapping[str, object]) -> None: return get_isolation_scope().set_tags(tags) @scopemethod -def set_context(key, value): - # type: (str, Dict[str, Any]) -> None +def set_context(key: str, value: dict[str, Any]) -> None: return get_isolation_scope().set_context(key, value) @scopemethod -def set_extra(key, value): - # type: (str, Any) -> None +def set_extra(key: str, value: Any) -> None: return get_isolation_scope().set_extra(key, value) @scopemethod -def set_user(value): - # type: (Optional[Dict[str, Any]]) -> None +def set_user(value: Optional[dict[str, Any]]) -> None: return get_isolation_scope().set_user(value) @scopemethod -def set_level(value): - # type: (LogLevelStr) -> None +def set_level(value: LogLevelStr) -> None: return get_isolation_scope().set_level(value) @clientmethod def flush( - timeout=None, # type: Optional[float] - callback=None, # type: Optional[Callable[[int, float], None]] -): - # type: (...) -> None + timeout: Optional[float] = None, + callback: Optional[Callable[[int, float], None]] = None, +) -> None: return get_client().flush(timeout=timeout, callback=callback) -@scopemethod -def start_span( - **kwargs, # type: Any -): - # type: (...) -> Span +def start_span(**kwargs: Any) -> Span: + """ + Start and return a span. + + This is the entry point to manual tracing instrumentation. + + A tree structure can be built by adding child spans to the span. + To start a new child span within the span, call the `start_child()` method. + + When used as a context manager, spans are automatically finished at the end + of the `with` block. If not using context managers, call the `finish()` + method. + """ return get_current_scope().start_span(**kwargs) -@scopemethod -def start_transaction( - transaction=None, # type: Optional[Transaction] - instrumenter=INSTRUMENTER.SENTRY, # type: str - custom_sampling_context=None, # type: Optional[SamplingContext] - **kwargs, # type: Unpack[TransactionKwargs] -): - # type: (...) -> Union[Transaction, NoOpSpan] +def start_transaction(transaction: Optional[Span] = None, **kwargs: Any) -> Span: """ + .. deprecated:: 3.0.0 + This function is deprecated and will be removed in a future release. + Use :py:meth:`sentry_sdk.start_span` instead. + Start and return a transaction on the current scope. Start an existing transaction if given, otherwise create and start a new @@ -393,47 +274,31 @@ def start_transaction( :param transaction: The transaction to start. If omitted, we create and start a new transaction. - :param instrumenter: This parameter is meant for internal use only. It - will be removed in the next major version. - :param custom_sampling_context: The transaction's custom sampling context. :param kwargs: Optional keyword arguments to be passed to the Transaction constructor. See :py:class:`sentry_sdk.tracing.Transaction` for available arguments. 
""" - return get_current_scope().start_transaction( - transaction, instrumenter, custom_sampling_context, **kwargs + return start_span( + span=transaction, + **kwargs, ) -def set_measurement(name, value, unit=""): - # type: (str, float, MeasurementUnit) -> None - """ - .. deprecated:: 2.28.0 - This function is deprecated and will be removed in the next major release. - """ - transaction = get_current_scope().transaction - if transaction is not None: - transaction.set_measurement(name, value, unit) - - -def get_current_span(scope=None): - # type: (Optional[Scope]) -> Optional[Span] +def get_current_span(scope: Optional[Scope] = None) -> Optional[Span]: """ Returns the currently active span if there is one running, otherwise `None` """ return tracing_utils.get_current_span(scope) -def get_traceparent(): - # type: () -> Optional[str] +def get_traceparent() -> Optional[str]: """ Returns the traceparent either from the active span or from the scope. """ return get_current_scope().get_traceparent() -def get_baggage(): - # type: () -> Optional[str] +def get_baggage() -> Optional[str]: """ Returns Baggage either from the active span or from the scope. """ @@ -444,40 +309,43 @@ def get_baggage(): return None -def continue_trace( - environ_or_headers, op=None, name=None, source=None, origin="manual" -): - # type: (Dict[str, Any], Optional[str], Optional[str], Optional[str], str) -> Transaction +@contextmanager +def continue_trace(environ_or_headers: dict[str, Any]) -> Generator[None, None, None]: """ - Sets the propagation context from environment or headers and returns a transaction. + Sets the propagation context from environment or headers to continue an incoming trace. """ - return get_isolation_scope().continue_trace( - environ_or_headers, op, name, source, origin - ) + with get_isolation_scope().continue_trace(environ_or_headers): + yield + + +@contextmanager +def new_trace() -> Generator[None, None, None]: + """ + Force creation of a new trace. + """ + with get_isolation_scope().new_trace(): + yield @scopemethod def start_session( - session_mode="application", # type: str -): - # type: (...) -> None + session_mode: str = "application", +) -> None: return get_isolation_scope().start_session(session_mode=session_mode) @scopemethod -def end_session(): - # type: () -> None +def end_session() -> None: return get_isolation_scope().end_session() @scopemethod -def set_transaction_name(name, source=None): - # type: (str, Optional[str]) -> None +def set_transaction_name(name: str, source: Optional[str] = None) -> None: return get_current_scope().set_transaction_name(name, source) -def update_current_span(op=None, name=None, attributes=None, data=None): - # type: (Optional[str], Optional[str], Optional[dict[str, Union[str, int, float, bool]]], Optional[dict[str, Any]]) -> None +def update_current_span(op=None, name=None, attributes=None): + # type: (Optional[str], Optional[str], Optional[dict[str, Union[str, int, float, bool]]]) -> None """ Update the current active span with the provided parameters. @@ -496,15 +364,6 @@ def update_current_span(op=None, name=None, attributes=None, data=None): "SELECT * FROM users"). If not provided, the span's name will remain unchanged. :type name: str or None - :param data: A dictionary of key-value pairs to add as data to the span. This - data will be merged with any existing span data. If not provided, - no data will be added. - - .. deprecated:: 2.35.0 - Use ``attributes`` instead. The ``data`` parameter will be removed - in a future version. 
- :type data: dict[str, Union[str, int, float, bool]] or None - :param attributes: A dictionary of key-value pairs to add as attributes to the span. Attribute values must be strings, integers, floats, or booleans. These attributes will be merged with any existing span data. If not provided, @@ -535,21 +394,7 @@ def update_current_span(op=None, name=None, attributes=None, data=None): current_span.op = op if name is not None: - # internally it is still description - current_span.description = name - - if data is not None and attributes is not None: - raise ValueError( - "Cannot provide both `data` and `attributes`. Please use only `attributes`." - ) - - if data is not None: - warnings.warn( - "The `data` parameter is deprecated. Please use `attributes` instead.", - DeprecationWarning, - stacklevel=2, - ) - attributes = data + current_span.name = name if attributes is not None: - current_span.update_data(attributes) + current_span.set_attributes(attributes) diff --git a/sentry_sdk/attachments.py b/sentry_sdk/attachments.py index e5404f8658..1f2fe7bb30 100644 --- a/sentry_sdk/attachments.py +++ b/sentry_sdk/attachments.py @@ -1,3 +1,4 @@ +from __future__ import annotations import os import mimetypes @@ -31,13 +32,12 @@ class Attachment: def __init__( self, - bytes=None, # type: Union[None, bytes, Callable[[], bytes]] - filename=None, # type: Optional[str] - path=None, # type: Optional[str] - content_type=None, # type: Optional[str] - add_to_transactions=False, # type: bool - ): - # type: (...) -> None + bytes: Union[None, bytes, Callable[[], bytes]] = None, + filename: Optional[str] = None, + path: Optional[str] = None, + content_type: Optional[str] = None, + add_to_transactions: bool = False, + ) -> None: if bytes is None and path is None: raise TypeError("path or raw bytes required for attachment") if filename is None and path is not None: @@ -52,10 +52,9 @@ def __init__( self.content_type = content_type self.add_to_transactions = add_to_transactions - def to_envelope_item(self): - # type: () -> Item + def to_envelope_item(self) -> Item: """Returns an envelope item for this attachment.""" - payload = None # type: Union[None, PayloadRef, bytes] + payload: Union[None, PayloadRef, bytes] = None if self.bytes is not None: if callable(self.bytes): payload = self.bytes() @@ -70,6 +69,5 @@ def to_envelope_item(self): filename=self.filename, ) - def __repr__(self): - # type: () -> str + def __repr__(self) -> str: return "" % (self.filename,) diff --git a/sentry_sdk/client.py b/sentry_sdk/client.py index 5d584a5537..ddddab488b 100644 --- a/sentry_sdk/client.py +++ b/sentry_sdk/client.py @@ -1,3 +1,4 @@ +from __future__ import annotations import os import uuid import random @@ -5,11 +6,10 @@ from collections.abc import Mapping from datetime import datetime, timezone from importlib import import_module -from typing import TYPE_CHECKING, List, Dict, cast, overload -import warnings +from typing import TYPE_CHECKING, overload import sentry_sdk -from sentry_sdk._compat import PY37, check_uwsgi_thread_support +from sentry_sdk._compat import check_uwsgi_thread_support from sentry_sdk.utils import ( AnnotatedValue, ContextVar, @@ -21,10 +21,7 @@ get_type_name, get_default_release, handle_in_app, - is_gevent, logger, - get_before_send_log, - has_logs_enabled, ) from sentry_sdk.serializer import serialize from sentry_sdk.tracing import trace @@ -33,14 +30,14 @@ SPANDATA, DEFAULT_MAX_VALUE_LENGTH, DEFAULT_OPTIONS, - INSTRUMENTER, VERSION, ClientConstructor, ) -from sentry_sdk.integrations import 
_DEFAULT_INTEGRATIONS, setup_integrations +from sentry_sdk.integrations import setup_integrations from sentry_sdk.integrations.dedupe import DedupeIntegration from sentry_sdk.sessions import SessionFlusher from sentry_sdk.envelope import Envelope + from sentry_sdk.profiler.continuous_profiler import setup_continuous_profiler from sentry_sdk.profiler.transaction_profiler import ( has_profiling_enabled, @@ -51,39 +48,41 @@ from sentry_sdk.monitor import Monitor if TYPE_CHECKING: - from typing import Any - from typing import Callable - from typing import Optional - from typing import Sequence - from typing import Type - from typing import Union - from typing import TypeVar + from typing import ( + Any, + Callable, + Optional, + Sequence, + Type, + Union, + TypeVar, + Dict, + ) from sentry_sdk._types import Event, Hint, SDKInfo, Log from sentry_sdk.integrations import Integration - from sentry_sdk.metrics import MetricsAggregator from sentry_sdk.scope import Scope from sentry_sdk.session import Session from sentry_sdk.spotlight import SpotlightClient from sentry_sdk.transport import Transport from sentry_sdk._log_batcher import LogBatcher - I = TypeVar("I", bound=Integration) # noqa: E741 + IntegrationType = TypeVar("IntegrationType", bound=Integration) # noqa: E741 + _client_init_debug = ContextVar("client_init_debug") -SDK_INFO = { +SDK_INFO: SDKInfo = { "name": "sentry.python", # SDK name will be overridden after integrations have been loaded with sentry_sdk.integrations.setup_integrations() "version": VERSION, "packages": [{"name": "pypi:sentry-sdk", "version": VERSION}], -} # type: SDKInfo +} -def _get_options(*args, **kwargs): - # type: (*Optional[str], **Any) -> Dict[str, Any] +def _get_options(*args: Optional[str], **kwargs: Any) -> Dict[str, Any]: if args and (isinstance(args[0], (bytes, str)) or args[0] is None): - dsn = args[0] # type: Optional[str] + dsn: Optional[str] = args[0] args = args[1:] else: dsn = None @@ -117,9 +116,6 @@ def _get_options(*args, **kwargs): if rv["server_name"] is None and hasattr(socket, "gethostname"): rv["server_name"] = socket.gethostname() - if rv["instrumenter"] is None: - rv["instrumenter"] = INSTRUMENTER.SENTRY - if rv["project_root"] is None: try: project_root = os.getcwd() @@ -128,9 +124,6 @@ def _get_options(*args, **kwargs): rv["project_root"] = project_root - if rv["enable_tracing"] is True and rv["traces_sample_rate"] is None: - rv["traces_sample_rate"] = 1.0 - if rv["event_scrubber"] is None: rv["event_scrubber"] = EventScrubber( send_default_pii=( @@ -149,24 +142,9 @@ def _get_options(*args, **kwargs): env_to_bool(os.environ.get("SENTRY_KEEP_ALIVE"), strict=True) or False ) - if rv["enable_tracing"] is not None: - warnings.warn( - "The `enable_tracing` parameter is deprecated. Please use `traces_sample_rate` instead.", - DeprecationWarning, - stacklevel=2, - ) - return rv -try: - # Python 3.6+ - module_not_found_error = ModuleNotFoundError -except Exception: - # Older Python versions - module_not_found_error = ImportError # type: ignore - - class BaseClient: """ .. versionadded:: 2.0.0 @@ -174,38 +152,31 @@ class BaseClient: The basic definition of a client that is used for sending data to Sentry. 
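    A minimal interaction sketch (assumes ``sentry_sdk.init`` has already been
    called; the integration name is illustrative):

        client = sentry_sdk.get_client()
        if client.is_active():
            logging_integration = client.get_integration("logging")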
""" - spotlight = None # type: Optional[SpotlightClient] + spotlight: Optional[SpotlightClient] = None - def __init__(self, options=None): - # type: (Optional[Dict[str, Any]]) -> None - self.options = ( + def __init__(self, options: Optional[Dict[str, Any]] = None) -> None: + self.options: Dict[str, Any] = ( options if options is not None else DEFAULT_OPTIONS - ) # type: Dict[str, Any] + ) - self.transport = None # type: Optional[Transport] - self.monitor = None # type: Optional[Monitor] - self.metrics_aggregator = None # type: Optional[MetricsAggregator] - self.log_batcher = None # type: Optional[LogBatcher] + self.transport: Optional[Transport] = None + self.monitor: Optional[Monitor] = None + self.log_batcher: Optional[LogBatcher] = None - def __getstate__(self, *args, **kwargs): - # type: (*Any, **Any) -> Any + def __getstate__(self, *args: Any, **kwargs: Any) -> Any: return {"options": {}} - def __setstate__(self, *args, **kwargs): - # type: (*Any, **Any) -> None + def __setstate__(self, *args: Any, **kwargs: Any) -> None: pass @property - def dsn(self): - # type: () -> Optional[str] + def dsn(self) -> Optional[str]: return None - def should_send_default_pii(self): - # type: () -> bool + def should_send_default_pii(self) -> bool: return False - def is_active(self): - # type: () -> bool + def is_active(self) -> bool: """ .. versionadded:: 2.0.0 @@ -213,48 +184,40 @@ def is_active(self): """ return False - def capture_event(self, *args, **kwargs): - # type: (*Any, **Any) -> Optional[str] + def capture_event(self, *args: Any, **kwargs: Any) -> Optional[str]: return None - def _capture_experimental_log(self, log): - # type: (Log) -> None + def _capture_experimental_log(self, log: "Log") -> None: pass - def capture_session(self, *args, **kwargs): - # type: (*Any, **Any) -> None + def capture_session(self, *args: Any, **kwargs: Any) -> None: return None if TYPE_CHECKING: @overload - def get_integration(self, name_or_class): - # type: (str) -> Optional[Integration] - ... + def get_integration(self, name_or_class: str) -> Optional[Integration]: ... @overload - def get_integration(self, name_or_class): - # type: (type[I]) -> Optional[I] - ... + def get_integration( + self, name_or_class: type[IntegrationType] + ) -> Optional[IntegrationType]: ... - def get_integration(self, name_or_class): - # type: (Union[str, type[Integration]]) -> Optional[Integration] + def get_integration( + self, name_or_class: Union[str, type[Integration]] + ) -> Optional[Integration]: return None - def close(self, *args, **kwargs): - # type: (*Any, **Any) -> None + def close(self, *args: Any, **kwargs: Any) -> None: return None - def flush(self, *args, **kwargs): - # type: (*Any, **Any) -> None + def flush(self, *args: Any, **kwargs: Any) -> None: return None - def __enter__(self): - # type: () -> BaseClient + def __enter__(self) -> BaseClient: return self - def __exit__(self, exc_type, exc_value, tb): - # type: (Any, Any, Any) -> None + def __exit__(self, exc_type: Any, exc_value: Any, tb: Any) -> None: return None @@ -278,22 +241,20 @@ class _Client(BaseClient): Alias of :py:class:`sentry_sdk.Client`. 
(Was created for better intellisense support) """ - def __init__(self, *args, **kwargs): - # type: (*Any, **Any) -> None - super(_Client, self).__init__(options=get_options(*args, **kwargs)) + def __init__(self, *args: Any, **kwargs: Any) -> None: + super(_Client, self).__init__(options=_get_options(*args, **kwargs)) self._init_impl() - def __getstate__(self): - # type: () -> Any + def __getstate__(self) -> Any: return {"options": self.options} - def __setstate__(self, state): - # type: (Any) -> None + def __setstate__(self, state: Any) -> None: self.options = state["options"] self._init_impl() - def _setup_instrumentation(self, functions_to_trace): - # type: (Sequence[Dict[str, str]]) -> None + def _setup_instrumentation( + self, functions_to_trace: Sequence[Dict[str, str]] + ) -> None: """ Instruments the functions given in the list `functions_to_trace` with the `@sentry_sdk.tracing.trace` decorator. """ @@ -310,7 +271,7 @@ def _setup_instrumentation(self, functions_to_trace): function_obj = getattr(module_obj, function_name) setattr(module_obj, function_name, trace(function_obj)) logger.debug("Enabled tracing for %s", function_qualname) - except module_not_found_error: + except ModuleNotFoundError: try: # Try to import a class # ex: "mymodule.submodule.MyClassName.member_function" @@ -343,12 +304,10 @@ def _setup_instrumentation(self, functions_to_trace): e, ) - def _init_impl(self): - # type: () -> None + def _init_impl(self) -> None: old_debug = _client_init_debug.get(False) - def _capture_envelope(envelope): - # type: (Envelope) -> None + def _capture_envelope(envelope: Envelope) -> None: if self.transport is not None: self.transport.capture_envelope(envelope) @@ -363,29 +322,9 @@ def _capture_envelope(envelope): self.session_flusher = SessionFlusher(capture_func=_capture_envelope) - self.metrics_aggregator = None # type: Optional[MetricsAggregator] - experiments = self.options.get("_experiments", {}) - if experiments.get("enable_metrics", True): - # Context vars are not working correctly on Python <=3.6 - # with gevent. - metrics_supported = not is_gevent() or PY37 - if metrics_supported: - from sentry_sdk.metrics import MetricsAggregator - - self.metrics_aggregator = MetricsAggregator( - capture_func=_capture_envelope, - enable_code_locations=bool( - experiments.get("metric_code_locations", True) - ), - ) - else: - logger.info( - "Metrics not supported on Python 3.6 and lower with gevent." - ) - self.log_batcher = None - if has_logs_enabled(self.options): + if self.options.get("enable_logs") is True: from sentry_sdk._log_batcher import LogBatcher self.log_batcher = LogBatcher(capture_func=_capture_envelope) @@ -398,19 +337,6 @@ def _capture_envelope(envelope): ) ) - if self.options["_experiments"].get("otel_powered_performance", False): - logger.debug( - "[OTel] Enabling experimental OTel-powered performance monitoring." - ) - self.options["instrumenter"] = INSTRUMENTER.OTEL - if ( - "sentry_sdk.integrations.opentelemetry.integration.OpenTelemetryIntegration" - not in _DEFAULT_INTEGRATIONS - ): - _DEFAULT_INTEGRATIONS.append( - "sentry_sdk.integrations.opentelemetry.integration.OpenTelemetryIntegration", - ) - self.integrations = setup_integrations( self.options["integrations"], with_defaults=self.options["default_integrations"], @@ -462,6 +388,15 @@ def _capture_envelope(envelope): except Exception as e: logger.debug("Can not set up continuous profiler.
(%s)", e) + from sentry_sdk.opentelemetry.tracing import ( + patch_readable_span, + setup_sentry_tracing, + ) + + patch_readable_span() + setup_sentry_tracing() + + logger.debug("[Tracing] Finished setting up OpenTelemetry") finally: _client_init_debug.set(old_debug) @@ -469,7 +404,6 @@ def _capture_envelope(envelope): if ( self.monitor - or self.metrics_aggregator or self.log_batcher or has_profiling_enabled(self.options) or isinstance(self.transport, BaseHttpTransport) @@ -478,8 +412,7 @@ def _capture_envelope(envelope): # need to check if it's safe to use them. check_uwsgi_thread_support() - def is_active(self): - # type: () -> bool + def is_active(self) -> bool: """ .. versionadded:: 2.0.0 @@ -487,8 +420,7 @@ def is_active(self): """ return True - def should_send_default_pii(self): - # type: () -> bool + def should_send_default_pii(self) -> bool: """ .. versionadded:: 2.0.0 @@ -497,28 +429,26 @@ def should_send_default_pii(self): return self.options.get("send_default_pii") or False @property - def dsn(self): - # type: () -> Optional[str] + def dsn(self) -> Optional[str]: """Returns the configured DSN as string.""" return self.options["dsn"] def _prepare_event( self, - event, # type: Event - hint, # type: Hint - scope, # type: Optional[Scope] - ): - # type: (...) -> Optional[Event] + event: Event, + hint: Hint, + scope: Optional[Scope], + ) -> Optional[Event]: - previous_total_spans = None # type: Optional[int] - previous_total_breadcrumbs = None # type: Optional[int] + previous_total_spans: Optional[int] = None + previous_total_breadcrumbs: Optional[int] = None if event.get("timestamp") is None: event["timestamp"] = datetime.now(timezone.utc) if scope is not None: is_transaction = event.get("type") == "transaction" - spans_before = len(cast(List[Dict[str, object]], event.get("spans", []))) + spans_before = len(event.get("spans", [])) event_ = scope.apply_to_event(event, hint, self.options) # one of the event/error processors returned None @@ -537,15 +467,13 @@ def _prepare_event( return None event = event_ - spans_delta = spans_before - len( - cast(List[Dict[str, object]], event.get("spans", [])) - ) + spans_delta = spans_before - len(event.get("spans", [])) if is_transaction and spans_delta > 0 and self.transport is not None: self.transport.record_lost_event( "event_processor", data_category="span", quantity=spans_delta ) - dropped_spans = event.pop("_dropped_spans", 0) + spans_delta # type: int + dropped_spans: int = event.pop("_dropped_spans", 0) + spans_delta if dropped_spans > 0: previous_total_spans = spans_before + dropped_spans if scope._n_breadcrumbs_truncated > 0: @@ -617,14 +545,11 @@ def _prepare_event( # Postprocess the event here so that annotated types do # generally not surface in before_send if event is not None: - event = cast( - "Event", - serialize( - cast("Dict[str, Any]", event), - max_request_body_size=self.options.get("max_request_body_size"), - max_value_length=self.options.get("max_value_length"), - custom_repr=self.options.get("custom_repr"), - ), + event: Event = serialize( # type: ignore[no-redef] + event, + max_request_body_size=self.options.get("max_request_body_size"), + max_value_length=self.options.get("max_value_length"), + custom_repr=self.options.get("custom_repr"), ) before_send = self.options["before_send"] @@ -633,7 +558,7 @@ def _prepare_event( and event is not None and event.get("type") != "transaction" ): - new_event = None + new_event: Optional["Event"] = None with capture_internal_exceptions(): new_event = before_send(event, hint or {}) if 
new_event is None: @@ -659,7 +584,7 @@ def _prepare_event( and event.get("type") == "transaction" ): new_event = None - spans_before = len(cast(List[Dict[str, object]], event.get("spans", []))) + spans_before = len(event.get("spans", [])) with capture_internal_exceptions(): new_event = before_send_transaction(event, hint or {}) if new_event is None: @@ -684,8 +609,7 @@ def _prepare_event( return event - def _is_ignored_error(self, event, hint): - # type: (Event, Hint) -> bool + def _is_ignored_error(self, event: Event, hint: Hint) -> bool: exc_info = hint.get("exc_info") if exc_info is None: return False @@ -708,11 +632,10 @@ def _is_ignored_error(self, event, hint): def _should_capture( self, - event, # type: Event - hint, # type: Hint - scope=None, # type: Optional[Scope] - ): - # type: (...) -> bool + event: "Event", + hint: "Hint", + scope: Optional["Scope"] = None, + ) -> bool: # Transactions are sampled independent of error events. is_transaction = event.get("type") == "transaction" if is_transaction: @@ -730,10 +653,9 @@ def _should_capture( def _should_sample_error( self, - event, # type: Event - hint, # type: Hint - ): - # type: (...) -> bool + event: Event, + hint: Hint, + ) -> bool: error_sampler = self.options.get("error_sampler", None) if callable(error_sampler): @@ -778,10 +700,9 @@ def _should_sample_error( def _update_session_from_event( self, - session, # type: Session - event, # type: Event - ): - # type: (...) -> None + session: Session, + event: Event, + ) -> None: crashed = False errored = False @@ -817,11 +738,10 @@ def _update_session_from_event( def capture_event( self, - event, # type: Event - hint=None, # type: Optional[Hint] - scope=None, # type: Optional[Scope] - ): - # type: (...) -> Optional[str] + event: Event, + hint: Optional[Hint] = None, + scope: Optional[Scope] = None, + ) -> Optional[str]: """Captures an event. :param event: A ready-made event that can be directly sent to Sentry. @@ -832,7 +752,7 @@ def capture_event( :returns: An event ID. May be `None` if there is no DSN set or if the SDK decided to discard the event for other reasons. In such situations setting `debug=True` on `init()` may help.
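A short usage sketch under the return contract described in the docstring above (an event ID, or `None` when the event is discarded); the event payload and DSN are illustrative:

```python
import sentry_sdk

sentry_sdk.init(dsn="https://examplePublicKey@o0.ingest.sentry.io/0")  # placeholder DSN

# A ready-made event dict; _prepare_event fills in missing fields such
# as the timestamp before the event is serialized and sent.
event_id = sentry_sdk.capture_event({"message": "something happened", "level": "warning"})

if event_id is None:
    # No DSN, sampling, ignore_errors, or a before_send hook discarded it;
    # init(debug=True) surfaces the reason in the SDK's debug log.
    pass
```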
""" - hint = dict(hint or ()) # type: Hint + hint: Hint = dict(hint or ()) if not self._should_capture(event, hint, scope): return None @@ -867,10 +787,10 @@ def capture_event( trace_context = event_opt.get("contexts", {}).get("trace") or {} dynamic_sampling_context = trace_context.pop("dynamic_sampling_context", {}) - headers = { + headers: dict[str, object] = { "event_id": event_opt["event_id"], "sent_at": format_timestamp(datetime.now(timezone.utc)), - } # type: dict[str, object] + } if dynamic_sampling_context: headers["trace"] = dynamic_sampling_context @@ -900,9 +820,8 @@ def capture_event( return return_value - def _capture_experimental_log(self, log): - # type: (Optional[Log]) -> None - if not has_logs_enabled(self.options) or log is None: + def _capture_experimental_log(self, log: Optional[Log]) -> None: + if self.options.get("enable_logs") is not True or log is None: return current_scope = sentry_sdk.get_current_scope() @@ -928,7 +847,7 @@ def _capture_experimental_log(self, log): log["attributes"]["sentry.trace.parent_span_id"] = span.span_id if log.get("trace_id") is None: - transaction = current_scope.transaction + transaction = current_scope.root_span propagation_context = isolation_scope.get_active_propagation_context() if transaction is not None: log["trace_id"] = transaction.trace_id @@ -957,7 +876,7 @@ def _capture_experimental_log(self, log): f'[Sentry Logs] [{log.get("severity_text")}] {log.get("body")}' ) - before_send_log = get_before_send_log(self.options) + before_send_log = self.options.get("before_send_log") if before_send_log is not None: log = before_send_log(log, {}) @@ -967,10 +886,7 @@ def _capture_experimental_log(self, log): if self.log_batcher: self.log_batcher.add(log) - def capture_session( - self, session # type: Session - ): - # type: (...) -> None + def capture_session(self, session: Session) -> None: if not session.release: logger.info("Discarded session update because of missing release") else: @@ -979,19 +895,16 @@ def capture_session( if TYPE_CHECKING: @overload - def get_integration(self, name_or_class): - # type: (str) -> Optional[Integration] - ... + def get_integration(self, name_or_class: str) -> Optional[Integration]: ... @overload - def get_integration(self, name_or_class): - # type: (type[I]) -> Optional[I] - ... + def get_integration( + self, name_or_class: type[IntegrationType] + ) -> Optional[IntegrationType]: ... def get_integration( - self, name_or_class # type: Union[str, Type[Integration]] - ): - # type: (...) -> Optional[Integration] + self, name_or_class: Union[str, Type[Integration]] + ) -> Optional[Integration]: """Returns the integration for this client by name or class. If the client does not have that integration then `None` is returned. """ @@ -1006,32 +919,32 @@ def get_integration( def close( self, - timeout=None, # type: Optional[float] - callback=None, # type: Optional[Callable[[int, float], None]] - ): - # type: (...) -> None + timeout: Optional[float] = None, + callback: Optional[Callable[[int, float], None]] = None, + ) -> None: """ Close the client and shut down the transport. Arguments have the same semantics as :py:meth:`Client.flush`. 
""" if self.transport is not None: self.flush(timeout=timeout, callback=callback) + self.session_flusher.kill() - if self.metrics_aggregator is not None: - self.metrics_aggregator.kill() + if self.log_batcher is not None: self.log_batcher.kill() + if self.monitor: self.monitor.kill() + self.transport.kill() self.transport = None def flush( self, - timeout=None, # type: Optional[float] - callback=None, # type: Optional[Callable[[int, float], None]] - ): - # type: (...) -> None + timeout: Optional[float] = None, + callback: Optional[Callable[[int, float], None]] = None, + ) -> None: """ Wait for the current events to be sent. @@ -1043,23 +956,19 @@ def flush( if timeout is None: timeout = self.options["shutdown_timeout"] self.session_flusher.flush() - if self.metrics_aggregator is not None: - self.metrics_aggregator.flush() + if self.log_batcher is not None: self.log_batcher.flush() + self.transport.flush(timeout=timeout, callback=callback) - def __enter__(self): - # type: () -> _Client + def __enter__(self) -> _Client: return self - def __exit__(self, exc_type, exc_value, tb): - # type: (Any, Any, Any) -> None + def __exit__(self, exc_type: Any, exc_value: Any, tb: Any) -> None: self.close() -from typing import TYPE_CHECKING - if TYPE_CHECKING: # Make mypy, PyCharm and other static analyzers think `get_options` is a # type to have nicer autocompletion for params. diff --git a/sentry_sdk/consts.py b/sentry_sdk/consts.py index d402467e5e..39e5956096 100644 --- a/sentry_sdk/consts.py +++ b/sentry_sdk/consts.py @@ -1,7 +1,21 @@ +from __future__ import annotations import itertools from enum import Enum from typing import TYPE_CHECKING +if TYPE_CHECKING: + from typing import ( + Optional, + Callable, + Union, + List, + Type, + Dict, + Any, + Sequence, + Tuple, + ) + # up top to prevent circular import due to integration import # This is more or less an arbitrary large-ish value for now, so that we allow # pretty long strings (like LLM prompts), but still have *some* upper limit @@ -29,17 +43,6 @@ class CompressionAlgo(Enum): if TYPE_CHECKING: - import sentry_sdk - - from typing import Optional - from typing import Callable - from typing import Union - from typing import List - from typing import Type - from typing import Dict - from typing import Any - from typing import Sequence - from typing import Tuple from typing_extensions import Literal from typing_extensions import TypedDict @@ -50,14 +53,13 @@ class CompressionAlgo(Enum): EventProcessor, Hint, Log, - MeasurementUnit, ProfilerMode, TracesSampler, TransactionProcessor, - MetricTags, - MetricValue, ) + import sentry_sdk + # Experiments are feature flags to enable and disable certain unstable SDK # functionality. Changing them from the defaults (`None`) in production # code is highly discouraged. They are not subject to any stability @@ -76,13 +78,6 @@ class CompressionAlgo(Enum): "transport_compression_algo": Optional[CompressionAlgo], "transport_num_pools": Optional[int], "transport_http2": Optional[bool], - "enable_metrics": Optional[bool], - "before_emit_metric": Optional[ - Callable[[str, MetricValue, MeasurementUnit, MetricTags], bool] - ], - "metric_code_locations": Optional[bool], - "enable_logs": Optional[bool], - "before_send_log": Optional[Callable[[Log, Hint], Optional[Log]]], }, total=False, ) @@ -100,11 +95,6 @@ class CompressionAlgo(Enum): ] -class INSTRUMENTER: - SENTRY = "sentry" - OTEL = "otel" - - class SPANDATA: """ Additional information describing the type of the span. 
@@ -120,6 +110,12 @@ class SPANDATA: Example: ["Smith et al. 2020", "Jones 2019"] """ + AI_COMPLETION_TOKENS_USED = "ai.completion_tokens.used" + """ + The number of output completion tokens used by the model. + Example: 10 + """ + AI_DOCUMENTS = "ai.documents" """ .. deprecated:: @@ -227,6 +223,12 @@ class SPANDATA: Example: 0.5 """ + AI_PROMPT_TOKENS_USED = "ai.prompt_tokens.used" + """ + The number of input prompt tokens used by the model. + Example: 10 + """ + AI_RAW_PROMPTING = "ai.raw_prompting" """ .. deprecated:: @@ -359,6 +361,12 @@ class SPANDATA: For an AI model call, the functions that are available """ + AI_TOTAL_TOKENS_USED = "ai.total_tokens.used" + """ + The total number of tokens (input + output) used by the request to the model. + Example: 20 + """ + AI_WARNINGS = "ai.warnings" """ .. deprecated:: @@ -798,6 +806,8 @@ class OP: HTTP_CLIENT = "http.client" HTTP_CLIENT_STREAM = "http.client.stream" HTTP_SERVER = "http.server" + HTTP = "http" + MESSAGE = "message" MIDDLEWARE_DJANGO = "middleware.django" MIDDLEWARE_LITESTAR = "middleware.litestar" MIDDLEWARE_LITESTAR_RECEIVE = "middleware.litestar.receive" @@ -827,6 +837,7 @@ class OP: QUEUE_TASK_HUEY = "queue.task.huey" QUEUE_SUBMIT_RAY = "queue.submit.ray" QUEUE_TASK_RAY = "queue.task.ray" + RPC = "rpc" SUBPROCESS = "subprocess" SUBPROCESS_WAIT = "subprocess.wait" SUBPROCESS_COMMUNICATE = "subprocess.communicate" @@ -838,79 +849,121 @@ class OP: SOCKET_DNS = "socket.dns" +BAGGAGE_HEADER_NAME = "baggage" +SENTRY_TRACE_HEADER_NAME = "sentry-trace" + +DEFAULT_SPAN_ORIGIN = "manual" +DEFAULT_SPAN_NAME = "" + + +# Transaction source +# see https://develop.sentry.dev/sdk/event-payloads/transaction/#transaction-annotations +class TransactionSource(str, Enum): + COMPONENT = "component" + CUSTOM = "custom" + ROUTE = "route" + TASK = "task" + URL = "url" + VIEW = "view" + + def __str__(self) -> str: + return self.value + + +# These are typically high cardinality and the server hates them +LOW_QUALITY_TRANSACTION_SOURCES = [ + TransactionSource.URL, +] + +SOURCE_FOR_STYLE = { + "endpoint": TransactionSource.COMPONENT, + "function_name": TransactionSource.COMPONENT, + "handler_name": TransactionSource.COMPONENT, + "method_and_path_pattern": TransactionSource.ROUTE, + "path": TransactionSource.URL, + "route_name": TransactionSource.COMPONENT, + "route_pattern": TransactionSource.ROUTE, + "uri_template": TransactionSource.ROUTE, + "url": TransactionSource.ROUTE, +} + + # This type exists to trick mypy and PyCharm into thinking `init` and `Client` # take these arguments (even though they take opaque **kwargs) class ClientConstructor: def __init__( self, - dsn=None, # type: Optional[str] + dsn: Optional[str] = None, *, - max_breadcrumbs=DEFAULT_MAX_BREADCRUMBS, # type: int - release=None, # type: Optional[str] - environment=None, # type: Optional[str] - server_name=None, # type: Optional[str] - shutdown_timeout=2, # type: float - integrations=[], # type: Sequence[sentry_sdk.integrations.Integration] # noqa: B006 - in_app_include=[], # type: List[str] # noqa: B006 - in_app_exclude=[], # type: List[str] # noqa: B006 - default_integrations=True, # type: bool - dist=None, # type: Optional[str] - transport=None, # type: Optional[Union[sentry_sdk.transport.Transport, Type[sentry_sdk.transport.Transport], Callable[[Event], None]]] - transport_queue_size=DEFAULT_QUEUE_SIZE, # type: int - sample_rate=1.0, # type: float - send_default_pii=None, # type: Optional[bool] - http_proxy=None, # type: Optional[str] - https_proxy=None, # type: Optional[str] - 
ignore_errors=[], # type: Sequence[Union[type, str]] # noqa: B006 - max_request_body_size="medium", # type: str - socket_options=None, # type: Optional[List[Tuple[int, int, int | bytes]]] - keep_alive=None, # type: Optional[bool] - before_send=None, # type: Optional[EventProcessor] - before_breadcrumb=None, # type: Optional[BreadcrumbProcessor] - debug=None, # type: Optional[bool] - attach_stacktrace=False, # type: bool - ca_certs=None, # type: Optional[str] - propagate_traces=True, # type: bool - traces_sample_rate=None, # type: Optional[float] - traces_sampler=None, # type: Optional[TracesSampler] - profiles_sample_rate=None, # type: Optional[float] - profiles_sampler=None, # type: Optional[TracesSampler] - profiler_mode=None, # type: Optional[ProfilerMode] - profile_lifecycle="manual", # type: Literal["manual", "trace"] - profile_session_sample_rate=None, # type: Optional[float] - auto_enabling_integrations=True, # type: bool - disabled_integrations=None, # type: Optional[Sequence[sentry_sdk.integrations.Integration]] - auto_session_tracking=True, # type: bool - send_client_reports=True, # type: bool - _experiments={}, # type: Experiments # noqa: B006 - proxy_headers=None, # type: Optional[Dict[str, str]] - instrumenter=INSTRUMENTER.SENTRY, # type: Optional[str] - before_send_transaction=None, # type: Optional[TransactionProcessor] - project_root=None, # type: Optional[str] - enable_tracing=None, # type: Optional[bool] - include_local_variables=True, # type: Optional[bool] - include_source_context=True, # type: Optional[bool] - trace_propagation_targets=[ # noqa: B006 - MATCH_ALL - ], # type: Optional[Sequence[str]] - functions_to_trace=[], # type: Sequence[Dict[str, str]] # noqa: B006 - event_scrubber=None, # type: Optional[sentry_sdk.scrubber.EventScrubber] - max_value_length=DEFAULT_MAX_VALUE_LENGTH, # type: int - enable_backpressure_handling=True, # type: bool - error_sampler=None, # type: Optional[Callable[[Event, Hint], Union[float, bool]]] - enable_db_query_source=True, # type: bool - db_query_source_threshold_ms=100, # type: int - spotlight=None, # type: Optional[Union[bool, str]] - cert_file=None, # type: Optional[str] - key_file=None, # type: Optional[str] - custom_repr=None, # type: Optional[Callable[..., Optional[str]]] - add_full_stack=DEFAULT_ADD_FULL_STACK, # type: bool - max_stack_frames=DEFAULT_MAX_STACK_FRAMES, # type: Optional[int] - enable_logs=False, # type: bool - before_send_log=None, # type: Optional[Callable[[Log, Hint], Optional[Log]]] - ): - # type: (...) 
-> None + max_breadcrumbs: int = DEFAULT_MAX_BREADCRUMBS, + release: Optional[str] = None, + environment: Optional[str] = None, + server_name: Optional[str] = None, + shutdown_timeout: float = 2, + integrations: Sequence[sentry_sdk.integrations.Integration] = [], # noqa: B006 + in_app_include: List[str] = [], # noqa: B006 + in_app_exclude: List[str] = [], # noqa: B006 + default_integrations: bool = True, + dist: Optional[str] = None, + transport: Optional[ + Union[ + sentry_sdk.transport.Transport, + Type[sentry_sdk.transport.Transport], + Callable[[Event], None], + ] + ] = None, + transport_queue_size: int = DEFAULT_QUEUE_SIZE, + sample_rate: float = 1.0, + send_default_pii: Optional[bool] = None, + http_proxy: Optional[str] = None, + https_proxy: Optional[str] = None, + ignore_errors: Sequence[Union[type, str]] = [], # noqa: B006 + max_request_body_size: str = "medium", + socket_options: Optional[List[Tuple[int, int, int | bytes]]] = None, + keep_alive: Optional[bool] = None, + before_send: Optional[EventProcessor] = None, + before_breadcrumb: Optional[BreadcrumbProcessor] = None, + debug: Optional[bool] = None, + attach_stacktrace: bool = False, + ca_certs: Optional[str] = None, + traces_sample_rate: Optional[float] = None, + traces_sampler: Optional[TracesSampler] = None, + profiles_sample_rate: Optional[float] = None, + profiles_sampler: Optional[TracesSampler] = None, + profiler_mode: Optional[ProfilerMode] = None, + profile_lifecycle: Literal["manual", "trace"] = "manual", + profile_session_sample_rate: Optional[float] = None, + auto_enabling_integrations: bool = True, + disabled_integrations: Optional[ + Sequence[sentry_sdk.integrations.Integration] + ] = None, + auto_session_tracking: bool = True, + send_client_reports: bool = True, + _experiments: Experiments = {}, # noqa: B006 + proxy_headers: Optional[Dict[str, str]] = None, + before_send_transaction: Optional[TransactionProcessor] = None, + project_root: Optional[str] = None, + include_local_variables: Optional[bool] = True, + include_source_context: Optional[bool] = True, + trace_propagation_targets: Optional[Sequence[str]] = [MATCH_ALL], # noqa: B006 + exclude_span_origins: Optional[Sequence[str]] = None, + functions_to_trace: Sequence[Dict[str, str]] = [], # noqa: B006 + event_scrubber: Optional[sentry_sdk.scrubber.EventScrubber] = None, + max_value_length: int = DEFAULT_MAX_VALUE_LENGTH, + enable_backpressure_handling: bool = True, + error_sampler: Optional[Callable[[Event, Hint], Union[float, bool]]] = None, + enable_db_query_source: bool = True, + db_query_source_threshold_ms: int = 100, + spotlight: Optional[Union[bool, str]] = None, + cert_file: Optional[str] = None, + key_file: Optional[str] = None, + custom_repr: Optional[Callable[..., Optional[str]]] = None, + add_full_stack: bool = DEFAULT_ADD_FULL_STACK, + max_stack_frames: Optional[int] = DEFAULT_MAX_STACK_FRAMES, + enable_logs: bool = False, + before_send_log: Optional[Callable[[Log, Hint], Optional[Log]]] = None, + ) -> None: """Initialize the Sentry SDK with the given parameters. All parameters described here can be used in a call to `sentry_sdk.init()`. :param dsn: The DSN tells the SDK where to send the events. @@ -1227,6 +1280,17 @@ def __init__( If `trace_propagation_targets` is not provided, trace data is attached to every outgoing request from the instrumented client. + :param exclude_span_origins: An optional list of strings or regex patterns to disable span creation based + on span origin. 
When a span's origin would match any of the provided patterns, the span will not be + created. + + This can be useful to exclude automatic span creation from specific integrations without disabling the + entire integration. + + The option may contain a list of strings or regexes against which the span origins are matched. + String entries do not have to be full matches, meaning a span origin is matched when it contains + a string provided through the option. + :param functions_to_trace: An optional list of functions that should be set up for tracing. For each function in the list, a span will be created when the function is executed. @@ -1279,10 +1343,6 @@ def __init__( :param profile_session_sample_rate: - :param enable_tracing: - - :param propagate_traces: - :param auto_session_tracking: :param spotlight: @@ -1302,8 +1362,7 @@ def __init__( pass -def _get_default_options(): - # type: () -> dict[str, Any] +def _get_default_options() -> dict[str, Any]: import inspect a = inspect.getfullargspec(ClientConstructor.__init__) @@ -1322,4 +1381,4 @@ def _get_default_options(): del _get_default_options -VERSION = "2.34.1" +VERSION = "3.0.0a5" diff --git a/sentry_sdk/crons/api.py b/sentry_sdk/crons/api.py index 20e95685a7..cbe8b92834 100644 --- a/sentry_sdk/crons/api.py +++ b/sentry_sdk/crons/api.py @@ -1,3 +1,4 @@ +from __future__ import annotations import uuid import sentry_sdk @@ -10,17 +11,16 @@ def _create_check_in_event( - monitor_slug=None, # type: Optional[str] - check_in_id=None, # type: Optional[str] - status=None, # type: Optional[str] - duration_s=None, # type: Optional[float] - monitor_config=None, # type: Optional[MonitorConfig] -): - # type: (...) -> Event + monitor_slug: Optional[str] = None, + check_in_id: Optional[str] = None, + status: Optional[str] = None, + duration_s: Optional[float] = None, + monitor_config: Optional[MonitorConfig] = None, +) -> Event: options = sentry_sdk.get_client().options - check_in_id = check_in_id or uuid.uuid4().hex # type: str + check_in_id = check_in_id or uuid.uuid4().hex - check_in = { + check_in: Event = { "type": "check_in", "monitor_slug": monitor_slug, "check_in_id": check_in_id, @@ -28,7 +28,7 @@ def _create_check_in_event( "duration": duration_s, "environment": options.get("environment", None), "release": options.get("release", None), - } # type: Event + } if monitor_config: check_in["monitor_config"] = monitor_config @@ -37,13 +37,12 @@ def _create_check_in_event( def capture_checkin( - monitor_slug=None, # type: Optional[str] - check_in_id=None, # type: Optional[str] - status=None, # type: Optional[str] - duration=None, # type: Optional[float] - monitor_config=None, # type: Optional[MonitorConfig] -): - # type: (...) 
-> str + monitor_slug: Optional[str] = None, + check_in_id: Optional[str] = None, + status: Optional[str] = None, + duration: Optional[float] = None, + monitor_config: Optional[MonitorConfig] = None, +) -> str: check_in_event = _create_check_in_event( monitor_slug=monitor_slug, check_in_id=check_in_id, diff --git a/sentry_sdk/crons/decorator.py b/sentry_sdk/crons/decorator.py index 9af00e61c0..50078a2dba 100644 --- a/sentry_sdk/crons/decorator.py +++ b/sentry_sdk/crons/decorator.py @@ -1,3 +1,4 @@ +from __future__ import annotations from functools import wraps from inspect import iscoroutinefunction @@ -16,8 +17,6 @@ ParamSpec, Type, TypeVar, - Union, - cast, overload, ) from sentry_sdk._types import MonitorConfig @@ -55,13 +54,15 @@ def test(arg): ``` """ - def __init__(self, monitor_slug=None, monitor_config=None): - # type: (Optional[str], Optional[MonitorConfig]) -> None + def __init__( + self, + monitor_slug: Optional[str] = None, + monitor_config: Optional[MonitorConfig] = None, + ) -> None: self.monitor_slug = monitor_slug self.monitor_config = monitor_config - def __enter__(self): - # type: () -> None + def __enter__(self) -> None: self.start_timestamp = now() self.check_in_id = capture_checkin( monitor_slug=self.monitor_slug, @@ -69,8 +70,12 @@ def __enter__(self): monitor_config=self.monitor_config, ) - def __exit__(self, exc_type, exc_value, traceback): - # type: (Optional[Type[BaseException]], Optional[BaseException], Optional[TracebackType]) -> None + def __exit__( + self, + exc_type: Optional[Type[BaseException]], + exc_value: Optional[BaseException], + traceback: Optional[TracebackType], + ) -> None: duration_s = now() - self.start_timestamp if exc_type is None and exc_value is None and traceback is None: @@ -89,46 +94,39 @@ def __exit__(self, exc_type, exc_value, traceback): if TYPE_CHECKING: @overload - def __call__(self, fn): - # type: (Callable[P, Awaitable[Any]]) -> Callable[P, Awaitable[Any]] + def __call__( + self, fn: Callable[P, Awaitable[Any]] + ) -> Callable[P, Awaitable[Any]]: # Unfortunately, mypy does not give us any reliable way to type check the # return value of an Awaitable (i.e. async function) for this overload, # since calling iscoroutinefunction narrows the type to Callable[P, Awaitable[Any]]. ... @overload - def __call__(self, fn): - # type: (Callable[P, R]) -> Callable[P, R] - ... + def __call__(self, fn: Callable[P, R]) -> Callable[P, R]: ... def __call__( self, - fn, # type: Union[Callable[P, R], Callable[P, Awaitable[Any]]] - ): - # type: (...) -> Union[Callable[P, R], Callable[P, Awaitable[Any]]] + fn: Callable[..., Any], + ) -> Callable[..., Any]: if iscoroutinefunction(fn): return self._async_wrapper(fn) - else: - if TYPE_CHECKING: - fn = cast("Callable[P, R]", fn) return self._sync_wrapper(fn) - def _async_wrapper(self, fn): - # type: (Callable[P, Awaitable[Any]]) -> Callable[P, Awaitable[Any]] + def _async_wrapper( + self, fn: Callable[P, Awaitable[Any]] + ) -> Callable[P, Awaitable[Any]]: @wraps(fn) - async def inner(*args: "P.args", **kwargs: "P.kwargs"): - # type: (...) -> R + async def inner(*args: P.args, **kwargs: P.kwargs) -> R: with self: return await fn(*args, **kwargs) return inner - def _sync_wrapper(self, fn): - # type: (Callable[P, R]) -> Callable[P, R] + def _sync_wrapper(self, fn: Callable[P, R]) -> Callable[P, R]: @wraps(fn) - def inner(*args: "P.args", **kwargs: "P.kwargs"): - # type: (...)
-> R + def inner(*args: P.args, **kwargs: P.kwargs) -> R: with self: return fn(*args, **kwargs) diff --git a/sentry_sdk/debug.py b/sentry_sdk/debug.py index e4c686a3e8..5564bb5ea3 100644 --- a/sentry_sdk/debug.py +++ b/sentry_sdk/debug.py @@ -1,6 +1,6 @@ +from __future__ import annotations import sys import logging -import warnings from sentry_sdk import get_client from sentry_sdk.client import _client_init_debug @@ -9,33 +9,21 @@ class _DebugFilter(logging.Filter): - def filter(self, record): - # type: (LogRecord) -> bool + def filter(self, record: LogRecord) -> bool: if _client_init_debug.get(False): return True return get_client().options["debug"] -def init_debug_support(): - # type: () -> None +def init_debug_support() -> None: if not logger.handlers: configure_logger() -def configure_logger(): - # type: () -> None +def configure_logger() -> None: _handler = logging.StreamHandler(sys.stderr) _handler.setFormatter(logging.Formatter(" [sentry] %(levelname)s: %(message)s")) logger.addHandler(_handler) logger.setLevel(logging.DEBUG) logger.addFilter(_DebugFilter()) - - -def configure_debug_hub(): - # type: () -> None - warnings.warn( - "configure_debug_hub is deprecated. Please remove calls to it, as it is a no-op.", - DeprecationWarning, - stacklevel=2, - ) diff --git a/sentry_sdk/envelope.py b/sentry_sdk/envelope.py index 5f7220bf21..c532191202 100644 --- a/sentry_sdk/envelope.py +++ b/sentry_sdk/envelope.py @@ -1,3 +1,4 @@ +from __future__ import annotations import io import json import mimetypes @@ -8,18 +9,11 @@ from typing import TYPE_CHECKING if TYPE_CHECKING: - from typing import Any - from typing import Optional - from typing import Union - from typing import Dict - from typing import List - from typing import Iterator - from sentry_sdk._types import Event, EventDataCategory + from typing import Any, Optional, Union, Dict, List, Iterator -def parse_json(data): - # type: (Union[bytes, str]) -> Any +def parse_json(data: Union[bytes, str]) -> Any: # on some python 3 versions this needs to be bytes if isinstance(data, bytes): data = data.decode("utf-8", "replace") @@ -35,10 +29,9 @@ class Envelope: def __init__( self, - headers=None, # type: Optional[Dict[str, Any]] - items=None, # type: Optional[List[Item]] - ): - # type: (...) -> None + headers: Optional[Dict[str, Any]] = None, + items: Optional[List[Item]] = None, + ) -> None: if headers is not None: headers = dict(headers) self.headers = headers or {} @@ -49,35 +42,22 @@ def __init__( self.items = items @property - def description(self): - # type: (...) -> str + def description(self) -> str: return "envelope with %s items (%s)" % ( len(self.items), ", ".join(x.data_category for x in self.items), ) - def add_event( - self, event # type: Event - ): - # type: (...) -> None + def add_event(self, event: Event) -> None: self.add_item(Item(payload=PayloadRef(json=event), type="event")) - def add_transaction( - self, transaction # type: Event - ): - # type: (...) -> None + def add_transaction(self, transaction: Event) -> None: self.add_item(Item(payload=PayloadRef(json=transaction), type="transaction")) - def add_profile( - self, profile # type: Any - ): - # type: (...) -> None + def add_profile(self, profile: Any) -> None: self.add_item(Item(payload=PayloadRef(json=profile), type="profile")) - def add_profile_chunk( - self, profile_chunk # type: Any - ): - # type: (...) 
-> None + def add_profile_chunk(self, profile_chunk: Any) -> None: self.add_item( Item( payload=PayloadRef(json=profile_chunk), @@ -86,72 +66,50 @@ def add_profile_chunk( ) ) - def add_checkin( - self, checkin # type: Any - ): - # type: (...) -> None + def add_checkin(self, checkin: Any) -> None: self.add_item(Item(payload=PayloadRef(json=checkin), type="check_in")) - def add_session( - self, session # type: Union[Session, Any] - ): - # type: (...) -> None + def add_session(self, session: Union[Session, Any]) -> None: if isinstance(session, Session): session = session.to_json() self.add_item(Item(payload=PayloadRef(json=session), type="session")) - def add_sessions( - self, sessions # type: Any - ): - # type: (...) -> None + def add_sessions(self, sessions: Any) -> None: self.add_item(Item(payload=PayloadRef(json=sessions), type="sessions")) - def add_item( - self, item # type: Item - ): - # type: (...) -> None + def add_item(self, item: Item) -> None: self.items.append(item) - def get_event(self): - # type: (...) -> Optional[Event] + def get_event(self) -> Optional[Event]: for items in self.items: event = items.get_event() if event is not None: return event return None - def get_transaction_event(self): - # type: (...) -> Optional[Event] + def get_transaction_event(self) -> Optional[Event]: for item in self.items: event = item.get_transaction_event() if event is not None: return event return None - def __iter__(self): - # type: (...) -> Iterator[Item] + def __iter__(self) -> Iterator[Item]: return iter(self.items) - def serialize_into( - self, f # type: Any - ): - # type: (...) -> None + def serialize_into(self, f: Any) -> None: f.write(json_dumps(self.headers)) f.write(b"\n") for item in self.items: item.serialize_into(f) - def serialize(self): - # type: (...) -> bytes + def serialize(self) -> bytes: out = io.BytesIO() self.serialize_into(out) return out.getvalue() @classmethod - def deserialize_from( - cls, f # type: Any - ): - # type: (...) -> Envelope + def deserialize_from(cls, f: Any) -> Envelope: headers = parse_json(f.readline()) items = [] while 1: @@ -162,31 +120,25 @@ def deserialize_from( return cls(headers=headers, items=items) @classmethod - def deserialize( - cls, bytes # type: bytes - ): - # type: (...) -> Envelope + def deserialize(cls, bytes: bytes) -> Envelope: return cls.deserialize_from(io.BytesIO(bytes)) - def __repr__(self): - # type: (...) -> str + def __repr__(self) -> str: return "<Envelope headers=%r items=%r>" % (self.headers, self.items) class PayloadRef: def __init__( self, - bytes=None, # type: Optional[bytes] - path=None, # type: Optional[Union[bytes, str]] - json=None, # type: Optional[Any] - ): - # type: (...) -> None + bytes: Optional[bytes] = None, + path: Optional[Union[bytes, str]] = None, + json: Optional[Any] = None, + ) -> None: self.json = json self.bytes = bytes self.path = path - def get_bytes(self): - # type: (...) -> bytes + def get_bytes(self) -> bytes: if self.bytes is None: if self.path is not None: with capture_internal_exceptions(): @@ -197,8 +149,7 @@ def get_bytes(self): return self.bytes or b"" @property - def inferred_content_type(self): - # type: (...) -> str + def inferred_content_type(self) -> str: if self.json is not None: return "application/json" elif self.path is not None: @@ -210,20 +161,19 @@ def inferred_content_type(self): return ty return "application/octet-stream" - def __repr__(self): - # type: (...)
-> str + def __repr__(self) -> str: return "<Payload %r>" % (self.inferred_content_type,) class Item: def __init__( self, - payload, # type: Union[bytes, str, PayloadRef] - headers=None, # type: Optional[Dict[str, Any]] - type=None, # type: Optional[str] - content_type=None, # type: Optional[str] - filename=None, # type: Optional[str] - ): + payload: Union[bytes, str, PayloadRef], + headers: Optional[Dict[str, Any]] = None, + type: Optional[str] = None, + content_type: Optional[str] = None, + filename: Optional[str] = None, + ) -> None: if headers is not None: headers = dict(headers) elif headers is None: @@ -247,8 +197,7 @@ def __init__( self.payload = payload - def __repr__(self): - # type: (...) -> str + def __repr__(self) -> str: return "<Item headers=%r payload=%r data_category=%r>" % ( self.headers, self.payload, @@ -256,13 +205,11 @@ def __repr__(self): ) @property - def type(self): - # type: (...) -> Optional[str] + def type(self) -> Optional[str]: return self.headers.get("type") @property - def data_category(self): - # type: (...) -> EventDataCategory + def data_category(self) -> EventDataCategory: ty = self.headers.get("type") if ty == "session" or ty == "sessions": return "session" @@ -280,19 +227,15 @@ def data_category(self): return "profile" elif ty == "profile_chunk": return "profile_chunk" - elif ty == "statsd": - return "metric_bucket" elif ty == "check_in": return "monitor" else: return "default" - def get_bytes(self): - # type: (...) -> bytes + def get_bytes(self) -> bytes: return self.payload.get_bytes() - def get_event(self): - # type: (...) -> Optional[Event] + def get_event(self) -> Optional[Event]: """ Returns an error event if there is one. """ @@ -300,16 +243,12 @@ def get_event(self): return self.payload.json return None - def get_transaction_event(self): - # type: (...) -> Optional[Event] + def get_transaction_event(self) -> Optional[Event]: if self.type == "transaction" and self.payload.json is not None: return self.payload.json return None - def serialize_into( - self, f # type: Any - ): - # type: (...) -> None + def serialize_into(self, f: Any) -> None: headers = dict(self.headers) bytes = self.get_bytes() headers["length"] = len(bytes) @@ -318,17 +257,13 @@ def serialize_into( f.write(bytes) f.write(b"\n") - def serialize(self): - # type: (...) -> bytes + def serialize(self) -> bytes: out = io.BytesIO() self.serialize_into(out) return out.getvalue() @classmethod - def deserialize_from( - cls, f # type: Any - ): - # type: (...) -> Optional[Item] + def deserialize_from(cls, f: Any) -> Optional[Item]: line = f.readline().rstrip() if not line: return None @@ -341,15 +276,12 @@ def deserialize_from( # if no length was specified we need to read up to the end of line # and remove it (if it is present, i.e. not the very last char in an eof terminated envelope) payload = f.readline().rstrip(b"\n") - if headers.get("type") in ("event", "transaction", "metric_buckets"): + if headers.get("type") in ("event", "transaction"): rv = cls(headers=headers, payload=PayloadRef(json=parse_json(payload))) else: rv = cls(headers=headers, payload=payload) return rv @classmethod - def deserialize( - cls, bytes # type: bytes - ): - # type: (...)
-> Optional[Item] + def deserialize(cls, bytes: bytes) -> Optional[Item]: return cls.deserialize_from(io.BytesIO(bytes)) diff --git a/sentry_sdk/feature_flags.py b/sentry_sdk/feature_flags.py index eb53acae5d..2f0660a80f 100644 --- a/sentry_sdk/feature_flags.py +++ b/sentry_sdk/feature_flags.py @@ -1,23 +1,22 @@ +from __future__ import annotations import copy import sentry_sdk from sentry_sdk._lru_cache import LRUCache from threading import Lock -from typing import TYPE_CHECKING, Any +from typing import TYPE_CHECKING if TYPE_CHECKING: - from typing import TypedDict + from typing import Any, TypedDict FlagData = TypedDict("FlagData", {"flag": str, "result": bool}) - DEFAULT_FLAG_CAPACITY = 100 class FlagBuffer: - def __init__(self, capacity): - # type: (int) -> None + def __init__(self, capacity: int) -> None: self.capacity = capacity self.lock = Lock() @@ -25,26 +24,22 @@ def __init__(self, capacity): # directly you're on your own! self.__buffer = LRUCache(capacity) - def clear(self): - # type: () -> None + def clear(self) -> None: self.__buffer = LRUCache(self.capacity) - def __deepcopy__(self, memo): - # type: (dict[int, Any]) -> FlagBuffer + def __deepcopy__(self, memo: dict[int, Any]) -> FlagBuffer: with self.lock: buffer = FlagBuffer(self.capacity) buffer.__buffer = copy.deepcopy(self.__buffer, memo) return buffer - def get(self): - # type: () -> list[FlagData] + def get(self) -> list[FlagData]: with self.lock: return [ {"flag": key, "result": value} for key, value in self.__buffer.get_all() ] - def set(self, flag, result): - # type: (str, bool) -> None + def set(self, flag: str, result: bool) -> None: if isinstance(result, FlagBuffer): # If someone were to insert `self` into `self` this would create a circular dependency # on the lock. This is of course a deadlock. However, this is far outside the expected @@ -58,8 +53,7 @@ def set(self, flag, result): self.__buffer.set(flag, result) -def add_feature_flag(flag, result): - # type: (str, bool) -> None +def add_feature_flag(flag: str, result: bool) -> None: """ Records a flag and its value to be sent on subsequent error events. We recommend you do this on flag evaluations. Flags are buffered per Sentry scope. 
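A usage sketch for the flag buffer above; `add_feature_flag` (whose final hunk follows) records the evaluation on the current scope and, when a span is active, on the span as well. The flag name and DSN are illustrative:

```python
import sentry_sdk
from sentry_sdk.feature_flags import add_feature_flag

sentry_sdk.init(dsn="https://examplePublicKey@o0.ingest.sentry.io/0")  # placeholder DSN

# Record an evaluation; if an error event is captured later, the buffered
# flags are attached to it (buffered per scope, LRU-capped at 100 entries).
add_feature_flag("new-checkout-flow", True)
```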
@@ -69,4 +63,4 @@ def add_feature_flag(flag, result): span = sentry_sdk.get_current_span() if span: - span.set_flag(f"flag.evaluation.{flag}", result) + span.set_flag(flag, result) diff --git a/sentry_sdk/hub.py b/sentry_sdk/hub.py deleted file mode 100644 index 7fda9202df..0000000000 --- a/sentry_sdk/hub.py +++ /dev/null @@ -1,739 +0,0 @@ -import warnings -from contextlib import contextmanager - -from sentry_sdk import ( - get_client, - get_global_scope, - get_isolation_scope, - get_current_scope, -) -from sentry_sdk._compat import with_metaclass -from sentry_sdk.consts import INSTRUMENTER -from sentry_sdk.scope import _ScopeManager -from sentry_sdk.client import Client -from sentry_sdk.tracing import ( - NoOpSpan, - Span, - Transaction, -) - -from sentry_sdk.utils import ( - logger, - ContextVar, -) - -from typing import TYPE_CHECKING - -if TYPE_CHECKING: - from typing import Any - from typing import Callable - from typing import ContextManager - from typing import Dict - from typing import Generator - from typing import List - from typing import Optional - from typing import overload - from typing import Tuple - from typing import Type - from typing import TypeVar - from typing import Union - - from typing_extensions import Unpack - - from sentry_sdk.scope import Scope - from sentry_sdk.client import BaseClient - from sentry_sdk.integrations import Integration - from sentry_sdk._types import ( - Event, - Hint, - Breadcrumb, - BreadcrumbHint, - ExcInfo, - LogLevelStr, - SamplingContext, - ) - from sentry_sdk.tracing import TransactionKwargs - - T = TypeVar("T") - -else: - - def overload(x): - # type: (T) -> T - return x - - -class SentryHubDeprecationWarning(DeprecationWarning): - """ - A custom deprecation warning to inform users that the Hub is deprecated. - """ - - _MESSAGE = ( - "`sentry_sdk.Hub` is deprecated and will be removed in a future major release. " - "Please consult our 1.x to 2.x migration guide for details on how to migrate " - "`Hub` usage to the new API: " - "https://docs.sentry.io/platforms/python/migration/1.x-to-2.x" - ) - - def __init__(self, *_): - # type: (*object) -> None - super().__init__(self._MESSAGE) - - -@contextmanager -def _suppress_hub_deprecation_warning(): - # type: () -> Generator[None, None, None] - """Utility function to suppress deprecation warnings for the Hub.""" - with warnings.catch_warnings(): - warnings.filterwarnings("ignore", category=SentryHubDeprecationWarning) - yield - - -_local = ContextVar("sentry_current_hub") - - -class HubMeta(type): - @property - def current(cls): - # type: () -> Hub - """Returns the current instance of the hub.""" - warnings.warn(SentryHubDeprecationWarning(), stacklevel=2) - rv = _local.get(None) - if rv is None: - with _suppress_hub_deprecation_warning(): - # This will raise a deprecation warning; suppress it since we already warned above. - rv = Hub(GLOBAL_HUB) - _local.set(rv) - return rv - - @property - def main(cls): - # type: () -> Hub - """Returns the main instance of the hub.""" - warnings.warn(SentryHubDeprecationWarning(), stacklevel=2) - return GLOBAL_HUB - - -class Hub(with_metaclass(HubMeta)): # type: ignore - """ - .. deprecated:: 2.0.0 - The Hub is deprecated. Its functionality will be merged into :py:class:`sentry_sdk.scope.Scope`. - - The hub wraps the concurrency management of the SDK. Each thread has - its own hub but the hub might transfer with the flow of execution if - context vars are available. - - If the hub is used with a with statement it's temporarily activated. 
- """ - - _stack = None # type: List[Tuple[Optional[Client], Scope]] - _scope = None # type: Optional[Scope] - - # Mypy doesn't pick up on the metaclass. - - if TYPE_CHECKING: - current = None # type: Hub - main = None # type: Hub - - def __init__( - self, - client_or_hub=None, # type: Optional[Union[Hub, Client]] - scope=None, # type: Optional[Any] - ): - # type: (...) -> None - warnings.warn(SentryHubDeprecationWarning(), stacklevel=2) - - current_scope = None - - if isinstance(client_or_hub, Hub): - client = get_client() - if scope is None: - # hub cloning is going on, we use a fork of the current/isolation scope for context manager - scope = get_isolation_scope().fork() - current_scope = get_current_scope().fork() - else: - client = client_or_hub # type: ignore - get_global_scope().set_client(client) - - if scope is None: # so there is no Hub cloning going on - # just the current isolation scope is used for context manager - scope = get_isolation_scope() - current_scope = get_current_scope() - - if current_scope is None: - # just the current current scope is used for context manager - current_scope = get_current_scope() - - self._stack = [(client, scope)] # type: ignore - self._last_event_id = None # type: Optional[str] - self._old_hubs = [] # type: List[Hub] - - self._old_current_scopes = [] # type: List[Scope] - self._old_isolation_scopes = [] # type: List[Scope] - self._current_scope = current_scope # type: Scope - self._scope = scope # type: Scope - - def __enter__(self): - # type: () -> Hub - self._old_hubs.append(Hub.current) - _local.set(self) - - current_scope = get_current_scope() - self._old_current_scopes.append(current_scope) - scope._current_scope.set(self._current_scope) - - isolation_scope = get_isolation_scope() - self._old_isolation_scopes.append(isolation_scope) - scope._isolation_scope.set(self._scope) - - return self - - def __exit__( - self, - exc_type, # type: Optional[type] - exc_value, # type: Optional[BaseException] - tb, # type: Optional[Any] - ): - # type: (...) -> None - old = self._old_hubs.pop() - _local.set(old) - - old_current_scope = self._old_current_scopes.pop() - scope._current_scope.set(old_current_scope) - - old_isolation_scope = self._old_isolation_scopes.pop() - scope._isolation_scope.set(old_isolation_scope) - - def run( - self, callback # type: Callable[[], T] - ): - # type: (...) -> T - """ - .. deprecated:: 2.0.0 - This function is deprecated and will be removed in a future release. - - Runs a callback in the context of the hub. Alternatively the - with statement can be used on the hub directly. - """ - with self: - return callback() - - def get_integration( - self, name_or_class # type: Union[str, Type[Integration]] - ): - # type: (...) -> Any - """ - .. deprecated:: 2.0.0 - This function is deprecated and will be removed in a future release. - Please use :py:meth:`sentry_sdk.client._Client.get_integration` instead. - - Returns the integration for this hub by name or class. If there - is no client bound or the client does not have that integration - then `None` is returned. - - If the return value is not `None` the hub is guaranteed to have a - client attached. - """ - return get_client().get_integration(name_or_class) - - @property - def client(self): - # type: () -> Optional[BaseClient] - """ - .. deprecated:: 2.0.0 - This property is deprecated and will be removed in a future release. - Please use :py:func:`sentry_sdk.api.get_client` instead. - - Returns the current client on the hub. 
- """ - client = get_client() - - if not client.is_active(): - return None - - return client - - @property - def scope(self): - # type: () -> Scope - """ - .. deprecated:: 2.0.0 - This property is deprecated and will be removed in a future release. - Returns the current scope on the hub. - """ - return get_isolation_scope() - - def last_event_id(self): - # type: () -> Optional[str] - """ - Returns the last event ID. - - .. deprecated:: 1.40.5 - This function is deprecated and will be removed in a future release. The functions `capture_event`, `capture_message`, and `capture_exception` return the event ID directly. - """ - logger.warning( - "Deprecated: last_event_id is deprecated. This will be removed in the future. The functions `capture_event`, `capture_message`, and `capture_exception` return the event ID directly." - ) - return self._last_event_id - - def bind_client( - self, new # type: Optional[BaseClient] - ): - # type: (...) -> None - """ - .. deprecated:: 2.0.0 - This function is deprecated and will be removed in a future release. - Please use :py:meth:`sentry_sdk.Scope.set_client` instead. - - Binds a new client to the hub. - """ - get_global_scope().set_client(new) - - def capture_event(self, event, hint=None, scope=None, **scope_kwargs): - # type: (Event, Optional[Hint], Optional[Scope], Any) -> Optional[str] - """ - .. deprecated:: 2.0.0 - This function is deprecated and will be removed in a future release. - Please use :py:meth:`sentry_sdk.Scope.capture_event` instead. - - Captures an event. - - Alias of :py:meth:`sentry_sdk.Scope.capture_event`. - - :param event: A ready-made event that can be directly sent to Sentry. - - :param hint: Contains metadata about the event that can be read from `before_send`, such as the original exception object or a HTTP request object. - - :param scope: An optional :py:class:`sentry_sdk.Scope` to apply to events. - The `scope` and `scope_kwargs` parameters are mutually exclusive. - - :param scope_kwargs: Optional data to apply to event. - For supported `**scope_kwargs` see :py:meth:`sentry_sdk.Scope.update_from_kwargs`. - The `scope` and `scope_kwargs` parameters are mutually exclusive. - """ - last_event_id = get_current_scope().capture_event( - event, hint, scope=scope, **scope_kwargs - ) - - is_transaction = event.get("type") == "transaction" - if last_event_id is not None and not is_transaction: - self._last_event_id = last_event_id - - return last_event_id - - def capture_message(self, message, level=None, scope=None, **scope_kwargs): - # type: (str, Optional[LogLevelStr], Optional[Scope], Any) -> Optional[str] - """ - .. deprecated:: 2.0.0 - This function is deprecated and will be removed in a future release. - Please use :py:meth:`sentry_sdk.Scope.capture_message` instead. - - Captures a message. - - Alias of :py:meth:`sentry_sdk.Scope.capture_message`. - - :param message: The string to send as the message to Sentry. - - :param level: If no level is provided, the default level is `info`. - - :param scope: An optional :py:class:`sentry_sdk.Scope` to apply to events. - The `scope` and `scope_kwargs` parameters are mutually exclusive. - - :param scope_kwargs: Optional data to apply to event. - For supported `**scope_kwargs` see :py:meth:`sentry_sdk.Scope.update_from_kwargs`. - The `scope` and `scope_kwargs` parameters are mutually exclusive. - - :returns: An `event_id` if the SDK decided to send the event (see :py:meth:`sentry_sdk.client._Client.capture_event`). 
- """ - last_event_id = get_current_scope().capture_message( - message, level=level, scope=scope, **scope_kwargs - ) - - if last_event_id is not None: - self._last_event_id = last_event_id - - return last_event_id - - def capture_exception(self, error=None, scope=None, **scope_kwargs): - # type: (Optional[Union[BaseException, ExcInfo]], Optional[Scope], Any) -> Optional[str] - """ - .. deprecated:: 2.0.0 - This function is deprecated and will be removed in a future release. - Please use :py:meth:`sentry_sdk.Scope.capture_exception` instead. - - Captures an exception. - - Alias of :py:meth:`sentry_sdk.Scope.capture_exception`. - - :param error: An exception to capture. If `None`, `sys.exc_info()` will be used. - - :param scope: An optional :py:class:`sentry_sdk.Scope` to apply to events. - The `scope` and `scope_kwargs` parameters are mutually exclusive. - - :param scope_kwargs: Optional data to apply to event. - For supported `**scope_kwargs` see :py:meth:`sentry_sdk.Scope.update_from_kwargs`. - The `scope` and `scope_kwargs` parameters are mutually exclusive. - - :returns: An `event_id` if the SDK decided to send the event (see :py:meth:`sentry_sdk.client._Client.capture_event`). - """ - last_event_id = get_current_scope().capture_exception( - error, scope=scope, **scope_kwargs - ) - - if last_event_id is not None: - self._last_event_id = last_event_id - - return last_event_id - - def add_breadcrumb(self, crumb=None, hint=None, **kwargs): - # type: (Optional[Breadcrumb], Optional[BreadcrumbHint], Any) -> None - """ - .. deprecated:: 2.0.0 - This function is deprecated and will be removed in a future release. - Please use :py:meth:`sentry_sdk.Scope.add_breadcrumb` instead. - - Adds a breadcrumb. - - :param crumb: Dictionary with the data as the sentry v7/v8 protocol expects. - - :param hint: An optional value that can be used by `before_breadcrumb` - to customize the breadcrumbs that are emitted. - """ - get_isolation_scope().add_breadcrumb(crumb, hint, **kwargs) - - def start_span(self, instrumenter=INSTRUMENTER.SENTRY, **kwargs): - # type: (str, Any) -> Span - """ - .. deprecated:: 2.0.0 - This function is deprecated and will be removed in a future release. - Please use :py:meth:`sentry_sdk.Scope.start_span` instead. - - Start a span whose parent is the currently active span or transaction, if any. - - The return value is a :py:class:`sentry_sdk.tracing.Span` instance, - typically used as a context manager to start and stop timing in a `with` - block. - - Only spans contained in a transaction are sent to Sentry. Most - integrations start a transaction at the appropriate time, for example - for every incoming HTTP request. Use - :py:meth:`sentry_sdk.start_transaction` to start a new transaction when - one is not already in progress. - - For supported `**kwargs` see :py:class:`sentry_sdk.tracing.Span`. - """ - scope = get_current_scope() - return scope.start_span(instrumenter=instrumenter, **kwargs) - - def start_transaction( - self, - transaction=None, - instrumenter=INSTRUMENTER.SENTRY, - custom_sampling_context=None, - **kwargs - ): - # type: (Optional[Transaction], str, Optional[SamplingContext], Unpack[TransactionKwargs]) -> Union[Transaction, NoOpSpan] - """ - .. deprecated:: 2.0.0 - This function is deprecated and will be removed in a future release. - Please use :py:meth:`sentry_sdk.Scope.start_transaction` instead. - - Start and return a transaction. - - Start an existing transaction if given, otherwise create and start a new - transaction with kwargs. 
- - This is the entry point to manual tracing instrumentation. - - A tree structure can be built by adding child spans to the transaction, - and child spans to other spans. To start a new child span within the - transaction or any span, call the respective `.start_child()` method. - - Every child span must be finished before the transaction is finished, - otherwise the unfinished spans are discarded. - - When used as context managers, spans and transactions are automatically - finished at the end of the `with` block. If not using context managers, - call the `.finish()` method. - - When the transaction is finished, it will be sent to Sentry with all its - finished child spans. - - For supported `**kwargs` see :py:class:`sentry_sdk.tracing.Transaction`. - """ - scope = get_current_scope() - - # For backwards compatibility, we allow passing the scope as the hub. - # We need a major release to make this nice. (if someone searches the code: deprecated) - # Type checking disabled for this line because deprecated keys are not allowed in the type signature. - kwargs["hub"] = scope # type: ignore - - return scope.start_transaction( - transaction, instrumenter, custom_sampling_context, **kwargs - ) - - def continue_trace(self, environ_or_headers, op=None, name=None, source=None): - # type: (Dict[str, Any], Optional[str], Optional[str], Optional[str]) -> Transaction - """ - .. deprecated:: 2.0.0 - This function is deprecated and will be removed in a future release. - Please use :py:meth:`sentry_sdk.Scope.continue_trace` instead. - - Sets the propagation context from environment or headers and returns a transaction. - """ - return get_isolation_scope().continue_trace( - environ_or_headers=environ_or_headers, op=op, name=name, source=source - ) - - @overload - def push_scope( - self, callback=None # type: Optional[None] - ): - # type: (...) -> ContextManager[Scope] - pass - - @overload - def push_scope( # noqa: F811 - self, callback # type: Callable[[Scope], None] - ): - # type: (...) -> None - pass - - def push_scope( # noqa - self, - callback=None, # type: Optional[Callable[[Scope], None]] - continue_trace=True, # type: bool - ): - # type: (...) -> Optional[ContextManager[Scope]] - """ - .. deprecated:: 2.0.0 - This function is deprecated and will be removed in a future release. - - Pushes a new layer on the scope stack. - - :param callback: If provided, this method pushes a scope, calls - `callback`, and pops the scope again. - - :returns: If no `callback` is provided, a context manager that should - be used to pop the scope again. - """ - if callback is not None: - with self.push_scope() as scope: - callback(scope) - return None - - return _ScopeManager(self) - - def pop_scope_unsafe(self): - # type: () -> Tuple[Optional[Client], Scope] - """ - .. deprecated:: 2.0.0 - This function is deprecated and will be removed in a future release. - - Pops a scope layer from the stack. - - Try to use the context manager :py:meth:`push_scope` instead. - """ - rv = self._stack.pop() - assert self._stack, "stack must have at least one layer" - return rv - - @overload - def configure_scope( - self, callback=None # type: Optional[None] - ): - # type: (...) -> ContextManager[Scope] - pass - - @overload - def configure_scope( # noqa: F811 - self, callback # type: Callable[[Scope], None] - ): - # type: (...) -> None - pass - - def configure_scope( # noqa - self, - callback=None, # type: Optional[Callable[[Scope], None]] - continue_trace=True, # type: bool - ): - # type: (...) 
-> Optional[ContextManager[Scope]] - """ - .. deprecated:: 2.0.0 - This function is deprecated and will be removed in a future release. - - Reconfigures the scope. - - :param callback: If provided, call the callback with the current scope. - - :returns: If no callback is provided, returns a context manager that returns the scope. - """ - scope = get_isolation_scope() - - if continue_trace: - scope.generate_propagation_context() - - if callback is not None: - # TODO: used to return None when client is None. Check if this changes behavior. - callback(scope) - - return None - - @contextmanager - def inner(): - # type: () -> Generator[Scope, None, None] - yield scope - - return inner() - - def start_session( - self, session_mode="application" # type: str - ): - # type: (...) -> None - """ - .. deprecated:: 2.0.0 - This function is deprecated and will be removed in a future release. - Please use :py:meth:`sentry_sdk.Scope.start_session` instead. - - Starts a new session. - """ - get_isolation_scope().start_session( - session_mode=session_mode, - ) - - def end_session(self): - # type: (...) -> None - """ - .. deprecated:: 2.0.0 - This function is deprecated and will be removed in a future release. - Please use :py:meth:`sentry_sdk.Scope.end_session` instead. - - Ends the current session if there is one. - """ - get_isolation_scope().end_session() - - def stop_auto_session_tracking(self): - # type: (...) -> None - """ - .. deprecated:: 2.0.0 - This function is deprecated and will be removed in a future release. - Please use :py:meth:`sentry_sdk.Scope.stop_auto_session_tracking` instead. - - Stops automatic session tracking. - - This temporarily disables session tracking for the current scope when called. - To resume session tracking call `resume_auto_session_tracking`. - """ - get_isolation_scope().stop_auto_session_tracking() - - def resume_auto_session_tracking(self): - # type: (...) -> None - """ - .. deprecated:: 2.0.0 - This function is deprecated and will be removed in a future release. - Please use :py:meth:`sentry_sdk.Scope.resume_auto_session_tracking` instead. - - Resumes automatic session tracking for the current scope if - disabled earlier. This requires that automatic session - tracking is enabled in general. - """ - get_isolation_scope().resume_auto_session_tracking() - - def flush( - self, - timeout=None, # type: Optional[float] - callback=None, # type: Optional[Callable[[int, float], None]] - ): - # type: (...) -> None - """ - .. deprecated:: 2.0.0 - This function is deprecated and will be removed in a future release. - Please use :py:meth:`sentry_sdk.client._Client.flush` instead. - - Alias for :py:meth:`sentry_sdk.client._Client.flush`. - """ - return get_client().flush(timeout=timeout, callback=callback) - - def get_traceparent(self): - # type: () -> Optional[str] - """ - .. deprecated:: 2.0.0 - This function is deprecated and will be removed in a future release. - Please use :py:meth:`sentry_sdk.Scope.get_traceparent` instead. - - Returns the traceparent either from the active span or from the scope. - """ - current_scope = get_current_scope() - traceparent = current_scope.get_traceparent() - - if traceparent is None: - isolation_scope = get_isolation_scope() - traceparent = isolation_scope.get_traceparent() - - return traceparent - - def get_baggage(self): - # type: () -> Optional[str] - """ - .. deprecated:: 2.0.0 - This function is deprecated and will be removed in a future release. - Please use :py:meth:`sentry_sdk.Scope.get_baggage` instead.
- - Returns Baggage either from the active span or from the scope. - """ - current_scope = get_current_scope() - baggage = current_scope.get_baggage() - - if baggage is None: - isolation_scope = get_isolation_scope() - baggage = isolation_scope.get_baggage() - - if baggage is not None: - return baggage.serialize() - - return None - - def iter_trace_propagation_headers(self, span=None): - # type: (Optional[Span]) -> Generator[Tuple[str, str], None, None] - """ - .. deprecated:: 2.0.0 - This function is deprecated and will be removed in a future release. - Please use :py:meth:`sentry_sdk.Scope.iter_trace_propagation_headers` instead. - - Return HTTP headers which allow propagation of trace data. Data taken - from the span representing the request, if available, or the current - span on the scope if not. - """ - return get_current_scope().iter_trace_propagation_headers( - span=span, - ) - - def trace_propagation_meta(self, span=None): - # type: (Optional[Span]) -> str - """ - .. deprecated:: 2.0.0 - This function is deprecated and will be removed in a future release. - Please use :py:meth:`sentry_sdk.Scope.trace_propagation_meta` instead. - - Return meta tags which should be injected into HTML templates - to allow propagation of trace information. - """ - if span is not None: - logger.warning( - "The parameter `span` in trace_propagation_meta() is deprecated and will be removed in the future." - ) - - return get_current_scope().trace_propagation_meta( - span=span, - ) - - -with _suppress_hub_deprecation_warning(): - # Suppress deprecation warning for the Hub here, since we still always - # import this module. - GLOBAL_HUB = Hub() -_local.set(GLOBAL_HUB) - - -# Circular imports -from sentry_sdk import scope diff --git a/sentry_sdk/integrations/__init__.py b/sentry_sdk/integrations/__init__.py index e2eadd523d..3a4804985a 100644 --- a/sentry_sdk/integrations/__init__.py +++ b/sentry_sdk/integrations/__init__.py @@ -1,3 +1,4 @@ +from __future__ import annotations from abc import ABC, abstractmethod from threading import Lock @@ -23,20 +24,20 @@ _installer_lock = Lock() # Set of all integration identifiers we have attempted to install -_processed_integrations = set() # type: Set[str] +_processed_integrations: Set[str] = set() # Set of all integration identifiers we have actually installed -_installed_integrations = set() # type: Set[str] +_installed_integrations: Set[str] = set() def _generate_default_integrations_iterator( - integrations, # type: List[str] - auto_enabling_integrations, # type: List[str] -): - # type: (...) 
-> Callable[[bool], Iterator[Type[Integration]]] + integrations: List[str], + auto_enabling_integrations: List[str], +) -> Callable[[bool], Iterator[Type[Integration]]]: - def iter_default_integrations(with_auto_enabling_integrations): - # type: (bool) -> Iterator[Type[Integration]] + def iter_default_integrations( + with_auto_enabling_integrations: bool, + ) -> Iterator[Type[Integration]]: """Returns an iterator of the default integration classes:""" from importlib import import_module @@ -131,10 +132,11 @@ def iter_default_integrations(with_auto_enabling_integrations): "celery": (4, 4, 7), "chalice": (1, 16, 0), "clickhouse_driver": (0, 2, 0), + "common": (1, 4, 0), # opentelemetry-sdk "cohere": (5, 4, 0), - "django": (1, 8), + "django": (2, 0), "dramatiq": (1, 9), - "falcon": (1, 4), + "falcon": (3, 0), "fastapi": (0, 79, 0), "flask": (1, 1, 4), "gql": (3, 4, 1), @@ -158,18 +160,20 @@ def iter_default_integrations(with_auto_enabling_integrations): "statsig": (0, 55, 3), "strawberry": (0, 209, 5), "tornado": (6, 0), + "trytond": (5, 0), "typer": (0, 15), "unleash": (6, 0, 1), } def setup_integrations( - integrations, - with_defaults=True, - with_auto_enabling_integrations=False, - disabled_integrations=None, -): - # type: (Sequence[Integration], bool, bool, Optional[Sequence[Union[type[Integration], Integration]]]) -> Dict[str, Integration] + integrations: Sequence[Integration], + with_defaults: bool = True, + with_auto_enabling_integrations: bool = False, + disabled_integrations: Optional[ + Sequence[Union[type[Integration], Integration]] + ] = None, +) -> Dict[str, Integration]: """ Given a list of integration instances, this installs them all. @@ -238,8 +242,11 @@ def setup_integrations( return integrations -def _check_minimum_version(integration, version, package=None): - # type: (type[Integration], Optional[tuple[int, ...]], Optional[str]) -> None +def _check_minimum_version( + integration: type[Integration], + version: Optional[tuple[int, ...]], + package: Optional[str] = None, +) -> None: package = package or integration.identifier if version is None: @@ -275,13 +282,12 @@ class Integration(ABC): install = None """Legacy method, do not implement.""" - identifier = None # type: str + identifier: str """String unique ID of integration type""" @staticmethod @abstractmethod - def setup_once(): - # type: () -> None + def setup_once() -> None: """ Initialize the integration. diff --git a/sentry_sdk/integrations/_asgi_common.py b/sentry_sdk/integrations/_asgi_common.py index c16bbbcfe8..efa67bda05 100644 --- a/sentry_sdk/integrations/_asgi_common.py +++ b/sentry_sdk/integrations/_asgi_common.py @@ -1,3 +1,4 @@ +from __future__ import annotations import urllib from sentry_sdk.scope import should_send_default_pii @@ -10,18 +11,16 @@ from typing import Dict from typing import Optional from typing import Union - from typing_extensions import Literal from sentry_sdk.utils import AnnotatedValue -def _get_headers(asgi_scope): - # type: (Any) -> Dict[str, str] +def _get_headers(asgi_scope: Any) -> Dict[str, str]: """ Extract headers from the ASGI scope, in the format that the Sentry protocol expects. 
""" - headers = {} # type: Dict[str, str] - for raw_key, raw_value in asgi_scope["headers"]: + headers: Dict[str, str] = {} + for raw_key, raw_value in asgi_scope.get("headers", {}): key = raw_key.decode("latin-1") value = raw_value.decode("latin-1") if key in headers: @@ -32,13 +31,16 @@ def _get_headers(asgi_scope): return headers -def _get_url(asgi_scope, default_scheme, host): - # type: (Dict[str, Any], Literal["ws", "http"], Optional[Union[AnnotatedValue, str]]) -> str +def _get_url( + asgi_scope: Dict[str, Any], + host: Optional[Union[AnnotatedValue, str]] = None, +) -> str: """ Extract URL from the ASGI scope, without also including the querystring. """ - scheme = asgi_scope.get("scheme", default_scheme) - + scheme = asgi_scope.get( + "scheme", "http" if asgi_scope.get("type") == "http" else "ws" + ) server = asgi_scope.get("server", None) path = asgi_scope.get("root_path", "") + asgi_scope.get("path", "") @@ -54,8 +56,7 @@ def _get_url(asgi_scope, default_scheme, host): return path -def _get_query(asgi_scope): - # type: (Any) -> Any +def _get_query(asgi_scope: Any) -> Any: """ Extract querystring from the ASGI scope, in the format that the Sentry protocol expects. """ @@ -65,8 +66,7 @@ def _get_query(asgi_scope): return urllib.parse.unquote(qs.decode("latin-1")) -def _get_ip(asgi_scope): - # type: (Any) -> str +def _get_ip(asgi_scope: Any) -> str: """ Extract IP Address from the ASGI scope based on request headers with fallback to scope client. """ @@ -84,12 +84,11 @@ def _get_ip(asgi_scope): return asgi_scope.get("client")[0] -def _get_request_data(asgi_scope): - # type: (Any) -> Dict[str, Any] +def _get_request_data(asgi_scope: Any) -> Dict[str, Any]: """ Returns data related to the HTTP request from the ASGI scope. """ - request_data = {} # type: Dict[str, Any] + request_data: Dict[str, Any] = {} ty = asgi_scope["type"] if ty in ("http", "websocket"): request_data["method"] = asgi_scope.get("method") @@ -97,9 +96,7 @@ def _get_request_data(asgi_scope): request_data["headers"] = headers = _filter_headers(_get_headers(asgi_scope)) request_data["query_string"] = _get_query(asgi_scope) - request_data["url"] = _get_url( - asgi_scope, "http" if ty == "http" else "ws", headers.get("host") - ) + request_data["url"] = _get_url(asgi_scope, headers.get("host")) client = asgi_scope.get("client") if client and should_send_default_pii(): diff --git a/sentry_sdk/integrations/_wsgi_common.py b/sentry_sdk/integrations/_wsgi_common.py index 48bc432887..625deb89a5 100644 --- a/sentry_sdk/integrations/_wsgi_common.py +++ b/sentry_sdk/integrations/_wsgi_common.py @@ -1,10 +1,10 @@ -from contextlib import contextmanager +from __future__ import annotations import json from copy import deepcopy import sentry_sdk from sentry_sdk.scope import should_send_default_pii -from sentry_sdk.utils import AnnotatedValue, logger +from sentry_sdk.utils import AnnotatedValue, SENSITIVE_DATA_SUBSTITUTE try: from django.http.request import RawPostDataException @@ -16,12 +16,11 @@ if TYPE_CHECKING: from typing import Any from typing import Dict - from typing import Iterator from typing import Mapping from typing import MutableMapping from typing import Optional from typing import Union - from sentry_sdk._types import Event, HttpStatusCodeRange + from sentry_sdk._types import Event SENSITIVE_ENV_KEYS = ( @@ -52,15 +51,9 @@ ) -# This noop context manager can be replaced with "from contextlib import nullcontext" when we drop Python 3.6 support -@contextmanager -def nullcontext(): - # type: () -> Iterator[None] - yield - - 
-def request_body_within_bounds(client, content_length): - # type: (Optional[sentry_sdk.client.BaseClient], int) -> bool +def request_body_within_bounds( + client: Optional[sentry_sdk.client.BaseClient], content_length: int +) -> bool: if client is None: return False @@ -82,17 +75,15 @@ class RequestExtractor: # it. Only some child classes implement all methods that raise # NotImplementedError in this class. - def __init__(self, request): - # type: (Any) -> None + def __init__(self, request: Any) -> None: self.request = request - def extract_into_event(self, event): - # type: (Event) -> None + def extract_into_event(self, event: Event) -> None: client = sentry_sdk.get_client() if not client.is_active(): return - data = None # type: Optional[Union[AnnotatedValue, Dict[str, Any]]] + data: Optional[Union[AnnotatedValue, Dict[str, Any]]] = None content_length = self.content_length() request_info = event.get("request", {}) @@ -128,27 +119,22 @@ def extract_into_event(self, event): event["request"] = deepcopy(request_info) - def content_length(self): - # type: () -> int + def content_length(self) -> int: try: return int(self.env().get("CONTENT_LENGTH", 0)) except ValueError: return 0 - def cookies(self): - # type: () -> MutableMapping[str, Any] + def cookies(self) -> MutableMapping[str, Any]: raise NotImplementedError() - def raw_data(self): - # type: () -> Optional[Union[str, bytes]] + def raw_data(self) -> Optional[Union[str, bytes]]: raise NotImplementedError() - def form(self): - # type: () -> Optional[Dict[str, Any]] + def form(self) -> Optional[Dict[str, Any]]: raise NotImplementedError() - def parsed_body(self): - # type: () -> Optional[Dict[str, Any]] + def parsed_body(self) -> Optional[Dict[str, Any]]: try: form = self.form() except Exception: @@ -170,12 +156,10 @@ def parsed_body(self): return self.json() - def is_json(self): - # type: () -> bool + def is_json(self) -> bool: return _is_json_content_type(self.env().get("CONTENT_TYPE")) - def json(self): - # type: () -> Optional[Any] + def json(self) -> Optional[Any]: try: if not self.is_json(): return None @@ -199,21 +183,17 @@ def json(self): return None - def files(self): - # type: () -> Optional[Dict[str, Any]] + def files(self) -> Optional[Dict[str, Any]]: raise NotImplementedError() - def size_of_file(self, file): - # type: (Any) -> int + def size_of_file(self, file: Any) -> int: raise NotImplementedError() - def env(self): - # type: () -> Dict[str, Any] + def env(self) -> Dict[str, Any]: raise NotImplementedError() -def _is_json_content_type(ct): - # type: (Optional[str]) -> bool +def _is_json_content_type(ct: Optional[str]) -> bool: mt = (ct or "").split(";", 1)[0] return ( mt == "application/json" @@ -222,8 +202,9 @@ def _is_json_content_type(ct): ) -def _filter_headers(headers): - # type: (Mapping[str, str]) -> Mapping[str, Union[AnnotatedValue, str]] +def _filter_headers( + headers: Mapping[str, str], +) -> Mapping[str, Union[AnnotatedValue, str]]: if should_send_default_pii(): return headers @@ -237,35 +218,14 @@ def _filter_headers(headers): } -def _in_http_status_code_range(code, code_ranges): - # type: (object, list[HttpStatusCodeRange]) -> bool - for target in code_ranges: - if isinstance(target, int): - if code == target: - return True - continue - - try: - if code in target: - return True - except TypeError: - logger.warning( - "failed_request_status_codes has to be a list of integers or containers" - ) - - return False - +def _request_headers_to_span_attributes(headers: dict[str, str]) -> dict[str, str]: + attributes = 
{} -class HttpCodeRangeContainer: - """ - Wrapper to make it possible to use list[HttpStatusCodeRange] as a Container[int]. - Used for backwards compatibility with the old `failed_request_status_codes` option. - """ + headers = _filter_headers(headers) - def __init__(self, code_ranges): - # type: (list[HttpStatusCodeRange]) -> None - self._code_ranges = code_ranges + for header, value in headers.items(): + if isinstance(value, AnnotatedValue): + value = SENSITIVE_DATA_SUBSTITUTE + attributes[f"http.request.header.{header.lower()}"] = value - def __contains__(self, item): - # type: (object) -> bool - return _in_http_status_code_range(item, self._code_ranges) + return attributes diff --git a/sentry_sdk/integrations/aiohttp.py b/sentry_sdk/integrations/aiohttp.py index ad3202bf2c..f40b31fac2 100644 --- a/sentry_sdk/integrations/aiohttp.py +++ b/sentry_sdk/integrations/aiohttp.py @@ -1,10 +1,17 @@ +from __future__ import annotations import sys import weakref from functools import wraps import sentry_sdk -from sentry_sdk.api import continue_trace -from sentry_sdk.consts import OP, SPANSTATUS, SPANDATA +from sentry_sdk.consts import ( + OP, + SPANSTATUS, + SPANDATA, + BAGGAGE_HEADER_NAME, + SOURCE_FOR_STYLE, + TransactionSource, +) from sentry_sdk.integrations import ( _DEFAULT_FAILED_REQUEST_STATUS_CODES, _check_minimum_version, @@ -15,22 +22,20 @@ from sentry_sdk.sessions import track_session from sentry_sdk.integrations._wsgi_common import ( _filter_headers, + _request_headers_to_span_attributes, request_body_within_bounds, ) -from sentry_sdk.tracing import ( - BAGGAGE_HEADER_NAME, - SOURCE_FOR_STYLE, - TransactionSource, -) from sentry_sdk.tracing_utils import should_propagate_trace from sentry_sdk.utils import ( capture_internal_exceptions, ensure_integration_enabled, event_from_exception, + http_client_status_to_breadcrumb_level, logger, parse_url, parse_version, reraise, + set_thread_info_from_span, transaction_from_function, HAS_REAL_CONTEXTVARS, CONTEXTVARS_ERROR_MESSAGE, @@ -67,6 +72,13 @@ TRANSACTION_STYLE_VALUES = ("handler_name", "method_and_path_pattern") +REQUEST_PROPERTY_TO_ATTRIBUTE = { + "query_string": "url.query", + "method": "http.request.method", + "scheme": "url.scheme", + "path": "url.path", +} + class AioHttpIntegration(Integration): identifier = "aiohttp" @@ -74,11 +86,10 @@ class AioHttpIntegration(Integration): def __init__( self, - transaction_style="handler_name", # type: str + transaction_style: str = "handler_name", *, - failed_request_status_codes=_DEFAULT_FAILED_REQUEST_STATUS_CODES, # type: Set[int] - ): - # type: (...) 
-> None + failed_request_status_codes: Set[int] = _DEFAULT_FAILED_REQUEST_STATUS_CODES, + ) -> None: if transaction_style not in TRANSACTION_STYLE_VALUES: raise ValueError( "Invalid value for transaction_style: %s (must be in %s)" @@ -88,8 +99,7 @@ def __init__( self._failed_request_status_codes = failed_request_status_codes @staticmethod - def setup_once(): - # type: () -> None + def setup_once() -> None: version = parse_version(AIOHTTP_VERSION) _check_minimum_version(AioHttpIntegration, version) @@ -106,8 +116,9 @@ def setup_once(): old_handle = Application._handle - async def sentry_app_handle(self, request, *args, **kwargs): - # type: (Any, Request, *Any, **Any) -> Any + async def sentry_app_handle( + self: Any, request: Request, *args: Any, **kwargs: Any + ) -> Any: integration = sentry_sdk.get_client().get_integration(AioHttpIntegration) if integration is None: return await old_handle(self, request, *args, **kwargs) @@ -123,59 +134,47 @@ async def sentry_app_handle(self, request, *args, **kwargs): scope.add_event_processor(_make_request_processor(weak_request)) headers = dict(request.headers) - transaction = continue_trace( - headers, - op=OP.HTTP_SERVER, - # If this transaction name makes it to the UI, AIOHTTP's - # URL resolver did not find a route or died trying. - name="generic AIOHTTP request", - source=TransactionSource.ROUTE, - origin=AioHttpIntegration.origin, - ) - with sentry_sdk.start_transaction( - transaction, - custom_sampling_context={"aiohttp_request": request}, - ): - try: - response = await old_handle(self, request) - except HTTPException as e: - transaction.set_http_status(e.status_code) - - if ( - e.status_code - in integration._failed_request_status_codes - ): - _capture_exception() - - raise - except (asyncio.CancelledError, ConnectionResetError): - transaction.set_status(SPANSTATUS.CANCELLED) - raise - except Exception: - # This will probably map to a 500 but seems like we - # have no way to tell. Do not set span status. - reraise(*_capture_exception()) - - try: - # A valid response handler will return a valid response with a status. But, if the handler - # returns an invalid response (e.g. None), the line below will raise an AttributeError. - # Even though this is likely invalid, we need to handle this case to ensure we don't break - # the application. - response_status = response.status - except AttributeError: - pass - else: - transaction.set_http_status(response_status) - - return response + with sentry_sdk.continue_trace(headers): + with sentry_sdk.start_span( + op=OP.HTTP_SERVER, + # If this transaction name makes it to the UI, AIOHTTP's + # URL resolver did not find a route or died trying. + name="generic AIOHTTP request", + source=TransactionSource.ROUTE, + origin=AioHttpIntegration.origin, + attributes=_prepopulate_attributes(request), + ) as span: + try: + response = await old_handle(self, request) + except HTTPException as e: + span.set_http_status(e.status_code) + + if ( + e.status_code + in integration._failed_request_status_codes + ): + _capture_exception() + + raise + except (asyncio.CancelledError, ConnectionResetError): + span.set_status(SPANSTATUS.CANCELLED) + raise + except Exception: + # This will probably map to a 500 but seems like we + # have no way to tell. Do not set span status. 
+ reraise(*_capture_exception()) + + span.set_http_status(response.status) + return response Application._handle = sentry_app_handle old_urldispatcher_resolve = UrlDispatcher.resolve @wraps(old_urldispatcher_resolve) - async def sentry_urldispatcher_resolve(self, request): - # type: (UrlDispatcher, Request) -> UrlMappingMatchInfo + async def sentry_urldispatcher_resolve( + self: UrlDispatcher, request: Request + ) -> UrlMappingMatchInfo: rv = await old_urldispatcher_resolve(self, request) integration = sentry_sdk.get_client().get_integration(AioHttpIntegration) @@ -207,8 +206,7 @@ async def sentry_urldispatcher_resolve(self, request): old_client_session_init = ClientSession.__init__ @ensure_integration_enabled(AioHttpIntegration, old_client_session_init) - def init(*args, **kwargs): - # type: (Any, Any) -> None + def init(*args: Any, **kwargs: Any) -> None: client_trace_configs = list(kwargs.get("trace_configs") or ()) trace_config = create_trace_config() client_trace_configs.append(trace_config) @@ -219,11 +217,13 @@ def init(*args, **kwargs): ClientSession.__init__ = init -def create_trace_config(): - # type: () -> TraceConfig +def create_trace_config() -> TraceConfig: - async def on_request_start(session, trace_config_ctx, params): - # type: (ClientSession, SimpleNamespace, TraceRequestStartParams) -> None + async def on_request_start( + session: ClientSession, + trace_config_ctx: SimpleNamespace, + params: TraceRequestStartParams, + ) -> None: if sentry_sdk.get_client().get_integration(AioHttpIntegration) is None: return @@ -238,12 +238,21 @@ async def on_request_start(session, trace_config_ctx, params): name="%s %s" % (method, parsed_url.url if parsed_url else SENSITIVE_DATA_SUBSTITUTE), origin=AioHttpIntegration.origin, + only_as_child_span=True, ) - span.set_data(SPANDATA.HTTP_METHOD, method) + + data = { + SPANDATA.HTTP_METHOD: method, + } + set_thread_info_from_span(data, span) + if parsed_url is not None: - span.set_data("url", parsed_url.url) - span.set_data(SPANDATA.HTTP_QUERY, parsed_url.query) - span.set_data(SPANDATA.HTTP_FRAGMENT, parsed_url.fragment) + data["url"] = parsed_url.url + data[SPANDATA.HTTP_QUERY] = parsed_url.query + data[SPANDATA.HTTP_FRAGMENT] = parsed_url.fragment + + for key, value in data.items(): + span.set_attribute(key, value) client = sentry_sdk.get_client() @@ -268,15 +277,31 @@ async def on_request_start(session, trace_config_ctx, params): params.headers[key] = value trace_config_ctx.span = span + trace_config_ctx.span_data = data - async def on_request_end(session, trace_config_ctx, params): - # type: (ClientSession, SimpleNamespace, TraceRequestEndParams) -> None + async def on_request_end( + session: ClientSession, + trace_config_ctx: SimpleNamespace, + params: TraceRequestEndParams, + ) -> None: if trace_config_ctx.span is None: return + span_data = trace_config_ctx.span_data or {} + status_code = int(params.response.status) + span_data[SPANDATA.HTTP_STATUS_CODE] = status_code + span_data["reason"] = params.response.reason + + sentry_sdk.add_breadcrumb( + type="http", + category="httplib", + data=span_data, + level=http_client_status_to_breadcrumb_level(status_code), + ) + span = trace_config_ctx.span span.set_http_status(int(params.response.status)) - span.set_data("reason", params.response.reason) + span.set_attribute("reason", params.response.reason) span.finish() trace_config = TraceConfig() @@ -287,13 +312,13 @@ async def on_request_end(session, trace_config_ctx, params): return trace_config -def _make_request_processor(weak_request): - # 
type: (weakref.ReferenceType[Request]) -> EventProcessor +def _make_request_processor( + weak_request: weakref.ReferenceType[Request], +) -> EventProcessor: def aiohttp_processor( - event, # type: Event - hint, # type: dict[str, Tuple[type, BaseException, Any]] - ): - # type: (...) -> Event + event: Event, + hint: dict[str, Tuple[type, BaseException, Any]], + ) -> Event: request = weak_request() if request is None: return event @@ -322,8 +347,7 @@ def aiohttp_processor( return aiohttp_processor -def _capture_exception(): - # type: () -> ExcInfo +def _capture_exception() -> ExcInfo: exc_info = sys.exc_info() event, hint = event_from_exception( exc_info, @@ -337,8 +361,7 @@ def _capture_exception(): BODY_NOT_READ_MESSAGE = "[Can't show request body due to implementation details.]" -def get_aiohttp_request_data(request): - # type: (Request) -> Union[Optional[str], AnnotatedValue] +def get_aiohttp_request_data(request: Request) -> Union[Optional[str], AnnotatedValue]: bytes_body = request._read_bytes if bytes_body is not None: @@ -355,3 +378,29 @@ def get_aiohttp_request_data(request): # request has no body return None + + +def _prepopulate_attributes(request: Request) -> dict[str, Any]: + """Construct initial span attributes that can be used in traces sampler.""" + attributes = {} + + for prop, attr in REQUEST_PROPERTY_TO_ATTRIBUTE.items(): + if getattr(request, prop, None) is not None: + attributes[attr] = getattr(request, prop) + + if getattr(request, "host", None) is not None: + try: + host, port = request.host.split(":") + attributes["server.address"] = host + attributes["server.port"] = port + except ValueError: + attributes["server.address"] = request.host + + with capture_internal_exceptions(): + url = f"{request.scheme}://{request.host}{request.path}" # noqa: E231 + if request.query_string: + attributes["url.full"] = f"{url}?{request.query_string}" + + attributes.update(_request_headers_to_span_attributes(dict(request.headers))) + + return attributes diff --git a/sentry_sdk/integrations/anthropic.py b/sentry_sdk/integrations/anthropic.py index 1e1f9112a1..08d0ce32dc 100644 --- a/sentry_sdk/integrations/anthropic.py +++ b/sentry_sdk/integrations/anthropic.py @@ -1,3 +1,4 @@ +from __future__ import annotations from functools import wraps from typing import TYPE_CHECKING @@ -29,13 +30,11 @@ class AnthropicIntegration(Integration): identifier = "anthropic" origin = f"auto.ai.{identifier}" - def __init__(self, include_prompts=True): - # type: (AnthropicIntegration, bool) -> None + def __init__(self: AnthropicIntegration, include_prompts: bool = True) -> None: self.include_prompts = include_prompts @staticmethod - def setup_once(): - # type: () -> None + def setup_once() -> None: version = package_version("anthropic") _check_minimum_version(AnthropicIntegration, version) @@ -43,8 +42,7 @@ def setup_once(): AsyncMessages.create = _wrap_message_create_async(AsyncMessages.create) -def _capture_exception(exc): - # type: (Any) -> None +def _capture_exception(exc: Any) -> None: event, hint = event_from_exception( exc, client_options=sentry_sdk.get_client().options, @@ -53,8 +51,7 @@ def _capture_exception(exc): sentry_sdk.capture_event(event, hint=hint) -def _calculate_token_usage(result, span): - # type: (Messages, Span) -> None +def _calculate_token_usage(result: Messages, span: Span) -> None: input_tokens = 0 output_tokens = 0 if hasattr(result, "usage"): @@ -74,8 +71,7 @@ def _calculate_token_usage(result, span): ) -def _get_responses(content): - # type: (list[Any]) -> list[dict[str, Any]] 
+def _get_responses(content: list[Any]) -> list[dict[str, Any]]: """ Get JSON of Anthropic responses. """ @@ -91,8 +87,12 @@ def _get_responses(content): return responses -def _collect_ai_data(event, input_tokens, output_tokens, content_blocks): - # type: (MessageStreamEvent, int, int, list[str]) -> tuple[int, int, list[str]] +def _collect_ai_data( + event: MessageStreamEvent, + input_tokens: int, + output_tokens: int, + content_blocks: list[str], +) -> tuple[int, int, list[str]]: """ Count token usage and collect content blocks from the AI streaming response. """ @@ -118,16 +118,19 @@ def _collect_ai_data(event, input_tokens, output_tokens, content_blocks): def _add_ai_data_to_span( - span, integration, input_tokens, output_tokens, content_blocks -): - # type: (Span, AnthropicIntegration, int, int, list[str]) -> None + span: Span, + integration: AnthropicIntegration, + input_tokens: int, + output_tokens: int, + content_blocks: list[str], +) -> None: """ Add token usage and content blocks from the AI streaming response to the span. """ with capture_internal_exceptions(): if should_send_default_pii() and integration.include_prompts: complete_message = "".join(content_blocks) - span.set_data( + span.set_attribute( SPANDATA.AI_RESPONSES, [{"type": "text", "text": complete_message}], ) @@ -138,11 +141,10 @@ def _add_ai_data_to_span( output_tokens=output_tokens, total_tokens=total_tokens, ) - span.set_data(SPANDATA.AI_STREAMING, True) + span.set_attribute(SPANDATA.AI_STREAMING, True) -def _sentry_patched_create_common(f, *args, **kwargs): - # type: (Any, *Any, **Any) -> Any +def _sentry_patched_create_common(f: Any, *args: Any, **kwargs: Any) -> Any: integration = kwargs.pop("integration") if integration is None: return f(*args, **kwargs) @@ -159,6 +161,7 @@ def _sentry_patched_create_common(f, *args, **kwargs): op=OP.ANTHROPIC_MESSAGES_CREATE, description="Anthropic messages create", origin=AnthropicIntegration.origin, + only_as_child_span=True, ) span.__enter__() @@ -169,15 +172,17 @@ def _sentry_patched_create_common(f, *args, **kwargs): model = kwargs.get("model") with capture_internal_exceptions(): - span.set_data(SPANDATA.AI_MODEL_ID, model) - span.set_data(SPANDATA.AI_STREAMING, False) + span.set_attribute(SPANDATA.AI_MODEL_ID, model) + span.set_attribute(SPANDATA.AI_STREAMING, False) if should_send_default_pii() and integration.include_prompts: - span.set_data(SPANDATA.AI_INPUT_MESSAGES, messages) + span.set_attribute(SPANDATA.AI_INPUT_MESSAGES, messages) if hasattr(result, "content"): if should_send_default_pii() and integration.include_prompts: - span.set_data(SPANDATA.AI_RESPONSES, _get_responses(result.content)) + span.set_attribute( + SPANDATA.AI_RESPONSES, _get_responses(result.content) + ) _calculate_token_usage(result, span) span.__exit__(None, None, None) @@ -185,11 +190,10 @@ def _sentry_patched_create_common(f, *args, **kwargs): elif hasattr(result, "_iterator"): old_iterator = result._iterator - def new_iterator(): - # type: () -> Iterator[MessageStreamEvent] + def new_iterator() -> Iterator[MessageStreamEvent]: input_tokens = 0 output_tokens = 0 - content_blocks = [] # type: list[str] + content_blocks: list[str] = [] for event in old_iterator: input_tokens, output_tokens, content_blocks = _collect_ai_data( @@ -202,11 +206,10 @@ def new_iterator(): ) span.__exit__(None, None, None) - async def new_iterator_async(): - # type: () -> AsyncIterator[MessageStreamEvent] + async def new_iterator_async() -> AsyncIterator[MessageStreamEvent]: input_tokens = 0 output_tokens = 0 -
content_blocks = [] # type: list[str] + content_blocks: list[str] = [] async for event in old_iterator: input_tokens, output_tokens, content_blocks = _collect_ai_data( @@ -225,16 +228,14 @@ async def new_iterator_async(): result._iterator = new_iterator() else: - span.set_data("unknown_response", True) + span.set_attribute("unknown_response", True) span.__exit__(None, None, None) return result -def _wrap_message_create(f): - # type: (Any) -> Any - def _execute_sync(f, *args, **kwargs): - # type: (Any, *Any, **Any) -> Any +def _wrap_message_create(f: Any) -> Any: + def _execute_sync(f: Any, *args: Any, **kwargs: Any) -> Any: gen = _sentry_patched_create_common(f, *args, **kwargs) try: @@ -254,8 +255,7 @@ def _execute_sync(f, *args, **kwargs): return e.value @wraps(f) - def _sentry_patched_create_sync(*args, **kwargs): - # type: (*Any, **Any) -> Any + def _sentry_patched_create_sync(*args: Any, **kwargs: Any) -> Any: integration = sentry_sdk.get_client().get_integration(AnthropicIntegration) kwargs["integration"] = integration @@ -264,10 +264,8 @@ def _sentry_patched_create_sync(*args, **kwargs): return _sentry_patched_create_sync -def _wrap_message_create_async(f): - # type: (Any) -> Any - async def _execute_async(f, *args, **kwargs): - # type: (Any, *Any, **Any) -> Any +def _wrap_message_create_async(f: Any) -> Any: + async def _execute_async(f: Any, *args: Any, **kwargs: Any) -> Any: gen = _sentry_patched_create_common(f, *args, **kwargs) try: @@ -287,8 +285,7 @@ async def _execute_async(f, *args, **kwargs): return e.value @wraps(f) - async def _sentry_patched_create_async(*args, **kwargs): - # type: (*Any, **Any) -> Any + async def _sentry_patched_create_async(*args: Any, **kwargs: Any) -> Any: integration = sentry_sdk.get_client().get_integration(AnthropicIntegration) kwargs["integration"] = integration diff --git a/sentry_sdk/integrations/argv.py b/sentry_sdk/integrations/argv.py index 315feefb4a..bf139bb219 100644 --- a/sentry_sdk/integrations/argv.py +++ b/sentry_sdk/integrations/argv.py @@ -1,3 +1,4 @@ +from __future__ import annotations import sys import sentry_sdk @@ -16,11 +17,9 @@ class ArgvIntegration(Integration): identifier = "argv" @staticmethod - def setup_once(): - # type: () -> None + def setup_once() -> None: @add_global_event_processor - def processor(event, hint): - # type: (Event, Optional[Hint]) -> Optional[Event] + def processor(event: Event, hint: Optional[Hint]) -> Optional[Event]: if sentry_sdk.get_client().get_integration(ArgvIntegration) is not None: extra = event.setdefault("extra", {}) # If some event processor decided to set extra to e.g. 
an diff --git a/sentry_sdk/integrations/ariadne.py b/sentry_sdk/integrations/ariadne.py index 1a95bc0145..77a3aa2d9d 100644 --- a/sentry_sdk/integrations/ariadne.py +++ b/sentry_sdk/integrations/ariadne.py @@ -1,3 +1,4 @@ +from __future__ import annotations from importlib import import_module import sentry_sdk @@ -33,8 +34,7 @@ class AriadneIntegration(Integration): identifier = "ariadne" @staticmethod - def setup_once(): - # type: () -> None + def setup_once() -> None: version = package_version("ariadne") _check_minimum_version(AriadneIntegration, version) @@ -43,15 +43,15 @@ def setup_once(): _patch_graphql() -def _patch_graphql(): - # type: () -> None +def _patch_graphql() -> None: old_parse_query = ariadne_graphql.parse_query old_handle_errors = ariadne_graphql.handle_graphql_errors old_handle_query_result = ariadne_graphql.handle_query_result @ensure_integration_enabled(AriadneIntegration, old_parse_query) - def _sentry_patched_parse_query(context_value, query_parser, data): - # type: (Optional[Any], Optional[QueryParser], Any) -> DocumentNode + def _sentry_patched_parse_query( + context_value: Optional[Any], query_parser: Optional[QueryParser], data: Any + ) -> DocumentNode: event_processor = _make_request_event_processor(data) sentry_sdk.get_isolation_scope().add_event_processor(event_processor) @@ -59,8 +59,9 @@ def _sentry_patched_parse_query(context_value, query_parser, data): return result @ensure_integration_enabled(AriadneIntegration, old_handle_errors) - def _sentry_patched_handle_graphql_errors(errors, *args, **kwargs): - # type: (List[GraphQLError], Any, Any) -> GraphQLResult + def _sentry_patched_handle_graphql_errors( + errors: List[GraphQLError], *args: Any, **kwargs: Any + ) -> GraphQLResult: result = old_handle_errors(errors, *args, **kwargs) event_processor = _make_response_event_processor(result[1]) @@ -83,8 +84,9 @@ def _sentry_patched_handle_graphql_errors(errors, *args, **kwargs): return result @ensure_integration_enabled(AriadneIntegration, old_handle_query_result) - def _sentry_patched_handle_query_result(result, *args, **kwargs): - # type: (Any, Any, Any) -> GraphQLResult + def _sentry_patched_handle_query_result( + result: Any, *args: Any, **kwargs: Any + ) -> GraphQLResult: query_result = old_handle_query_result(result, *args, **kwargs) event_processor = _make_response_event_processor(query_result[1]) @@ -111,12 +113,10 @@ def _sentry_patched_handle_query_result(result, *args, **kwargs): ariadne_graphql.handle_query_result = _sentry_patched_handle_query_result # type: ignore -def _make_request_event_processor(data): - # type: (GraphQLSchema) -> EventProcessor +def _make_request_event_processor(data: GraphQLSchema) -> EventProcessor: """Add request data and api_target to events.""" - def inner(event, hint): - # type: (Event, dict[str, Any]) -> Event + def inner(event: Event, hint: dict[str, Any]) -> Event: if not isinstance(data, dict): return event @@ -143,12 +143,10 @@ def inner(event, hint): return inner -def _make_response_event_processor(response): - # type: (Dict[str, Any]) -> EventProcessor +def _make_response_event_processor(response: Dict[str, Any]) -> EventProcessor: """Add response data to the event's response context.""" - def inner(event, hint): - # type: (Event, dict[str, Any]) -> Event + def inner(event: Event, hint: dict[str, Any]) -> Event: with capture_internal_exceptions(): if should_send_default_pii() and response.get("errors"): contexts = event.setdefault("contexts", {}) diff --git a/sentry_sdk/integrations/arq.py 
b/sentry_sdk/integrations/arq.py index b0b3d3f03e..cdadd2030b 100644 --- a/sentry_sdk/integrations/arq.py +++ b/sentry_sdk/integrations/arq.py @@ -1,3 +1,4 @@ +from __future__ import annotations import sys import sentry_sdk @@ -5,7 +6,7 @@ from sentry_sdk.integrations import _check_minimum_version, DidNotEnable, Integration from sentry_sdk.integrations.logging import ignore_logger from sentry_sdk.scope import should_send_default_pii -from sentry_sdk.tracing import Transaction, TransactionSource +from sentry_sdk.tracing import TransactionSource from sentry_sdk.utils import ( capture_internal_exceptions, ensure_integration_enabled, @@ -37,14 +38,15 @@ ARQ_CONTROL_FLOW_EXCEPTIONS = (JobExecutionFailed, Retry, RetryJob) +DEFAULT_TRANSACTION_NAME = "unknown arq task" + class ArqIntegration(Integration): identifier = "arq" origin = f"auto.queue.{identifier}" @staticmethod - def setup_once(): - # type: () -> None + def setup_once() -> None: try: if isinstance(ARQ_VERSION, str): @@ -64,19 +66,22 @@ def setup_once(): ignore_logger("arq.worker") -def patch_enqueue_job(): - # type: () -> None +def patch_enqueue_job() -> None: old_enqueue_job = ArqRedis.enqueue_job original_kwdefaults = old_enqueue_job.__kwdefaults__ - async def _sentry_enqueue_job(self, function, *args, **kwargs): - # type: (ArqRedis, str, *Any, **Any) -> Optional[Job] + async def _sentry_enqueue_job( + self: ArqRedis, function: str, *args: Any, **kwargs: Any + ) -> Optional[Job]: integration = sentry_sdk.get_client().get_integration(ArqIntegration) if integration is None: return await old_enqueue_job(self, function, *args, **kwargs) with sentry_sdk.start_span( - op=OP.QUEUE_SUBMIT_ARQ, name=function, origin=ArqIntegration.origin + op=OP.QUEUE_SUBMIT_ARQ, + name=function, + origin=ArqIntegration.origin, + only_as_child_span=True, ): return await old_enqueue_job(self, function, *args, **kwargs) @@ -84,44 +89,47 @@ async def _sentry_enqueue_job(self, function, *args, **kwargs): ArqRedis.enqueue_job = _sentry_enqueue_job -def patch_run_job(): - # type: () -> None +def patch_run_job() -> None: old_run_job = Worker.run_job - async def _sentry_run_job(self, job_id, score): - # type: (Worker, str, int) -> None + async def _sentry_run_job(self: Worker, job_id: str, score: int) -> None: integration = sentry_sdk.get_client().get_integration(ArqIntegration) if integration is None: return await old_run_job(self, job_id, score) with sentry_sdk.isolation_scope() as scope: scope._name = "arq" + scope.set_transaction_name( + DEFAULT_TRANSACTION_NAME, + source=TransactionSource.TASK, + ) scope.clear_breadcrumbs() - transaction = Transaction( - name="unknown arq task", - status="ok", + with sentry_sdk.start_span( op=OP.QUEUE_TASK_ARQ, + name=DEFAULT_TRANSACTION_NAME, source=TransactionSource.TASK, origin=ArqIntegration.origin, - ) + ) as span: + return_value = await old_run_job(self, job_id, score) + + if span.status is None: + span.set_status(SPANSTATUS.OK) - with sentry_sdk.start_transaction(transaction): - return await old_run_job(self, job_id, score) + return return_value Worker.run_job = _sentry_run_job -def _capture_exception(exc_info): - # type: (ExcInfo) -> None +def _capture_exception(exc_info: ExcInfo) -> None: scope = sentry_sdk.get_current_scope() - if scope.transaction is not None: + if scope.root_span is not None: if exc_info[0] in ARQ_CONTROL_FLOW_EXCEPTIONS: - scope.transaction.set_status(SPANSTATUS.ABORTED) + scope.root_span.set_status(SPANSTATUS.ABORTED) return - scope.transaction.set_status(SPANSTATUS.INTERNAL_ERROR) + 
scope.root_span.set_status(SPANSTATUS.INTERNAL_ERROR) event, hint = event_from_exception( exc_info, @@ -131,15 +139,15 @@ def _capture_exception(exc_info): sentry_sdk.capture_event(event, hint=hint) -def _make_event_processor(ctx, *args, **kwargs): - # type: (Dict[Any, Any], *Any, **Any) -> EventProcessor - def event_processor(event, hint): - # type: (Event, Hint) -> Optional[Event] +def _make_event_processor( + ctx: Dict[Any, Any], *args: Any, **kwargs: Any +) -> EventProcessor: + def event_processor(event: Event, hint: Hint) -> Optional[Event]: with capture_internal_exceptions(): scope = sentry_sdk.get_current_scope() - if scope.transaction is not None: - scope.transaction.name = ctx["job_name"] + if scope.root_span is not None: + scope.root_span.name = ctx["job_name"] event["transaction"] = ctx["job_name"] tags = event.setdefault("tags", {}) @@ -162,11 +170,9 @@ def event_processor(event, hint): return event_processor -def _wrap_coroutine(name, coroutine): - # type: (str, WorkerCoroutine) -> WorkerCoroutine +def _wrap_coroutine(name: str, coroutine: WorkerCoroutine) -> WorkerCoroutine: - async def _sentry_coroutine(ctx, *args, **kwargs): - # type: (Dict[Any, Any], *Any, **Any) -> Any + async def _sentry_coroutine(ctx: Dict[Any, Any], *args: Any, **kwargs: Any) -> Any: integration = sentry_sdk.get_client().get_integration(ArqIntegration) if integration is None: return await coroutine(ctx, *args, **kwargs) @@ -187,13 +193,11 @@ async def _sentry_coroutine(ctx, *args, **kwargs): return _sentry_coroutine -def patch_create_worker(): - # type: () -> None +def patch_create_worker() -> None: old_create_worker = arq.worker.create_worker @ensure_integration_enabled(ArqIntegration, old_create_worker) - def _sentry_create_worker(*args, **kwargs): - # type: (*Any, **Any) -> Worker + def _sentry_create_worker(*args: Any, **kwargs: Any) -> Worker: settings_cls = args[0] if isinstance(settings_cls, dict): @@ -232,16 +236,14 @@ def _sentry_create_worker(*args, **kwargs): arq.worker.create_worker = _sentry_create_worker -def _get_arq_function(func): - # type: (Union[str, Function, WorkerCoroutine]) -> Function +def _get_arq_function(func: Union[str, Function, WorkerCoroutine]) -> Function: arq_func = arq.worker.func(func) arq_func.coroutine = _wrap_coroutine(arq_func.name, arq_func.coroutine) return arq_func -def _get_arq_cron_job(cron_job): - # type: (CronJob) -> CronJob +def _get_arq_cron_job(cron_job: CronJob) -> CronJob: cron_job.coroutine = _wrap_coroutine(cron_job.name, cron_job.coroutine) return cron_job diff --git a/sentry_sdk/integrations/asgi.py b/sentry_sdk/integrations/asgi.py index 1b020ebbc0..29a42afe3c 100644 --- a/sentry_sdk/integrations/asgi.py +++ b/sentry_sdk/integrations/asgi.py @@ -4,31 +4,29 @@ Based on Tom Christie's `sentry-asgi`.
""" +from __future__ import annotations import asyncio import inspect from copy import deepcopy from functools import partial import sentry_sdk -from sentry_sdk.api import continue_trace -from sentry_sdk.consts import OP +from sentry_sdk.consts import OP, SOURCE_FOR_STYLE, TransactionSource from sentry_sdk.integrations._asgi_common import ( _get_headers, + _get_query, _get_request_data, _get_url, ) from sentry_sdk.integrations._wsgi_common import ( DEFAULT_HTTP_METHODS_TO_CAPTURE, - nullcontext, + _request_headers_to_span_attributes, ) from sentry_sdk.sessions import track_session -from sentry_sdk.tracing import ( - SOURCE_FOR_STYLE, - TransactionSource, -) from sentry_sdk.utils import ( ContextVar, + capture_internal_exceptions, event_from_exception, HAS_REAL_CONTEXTVARS, CONTEXTVARS_ERROR_MESSAGE, @@ -36,7 +34,6 @@ transaction_from_function, _get_installed_modules, ) -from sentry_sdk.tracing import Transaction from typing import TYPE_CHECKING @@ -56,9 +53,16 @@ TRANSACTION_STYLE_VALUES = ("endpoint", "url") +ASGI_SCOPE_PROPERTY_TO_ATTRIBUTE = { + "http_version": "network.protocol.version", + "method": "http.request.method", + "path": "url.path", + "scheme": "url.scheme", + "type": "network.protocol.name", +} + -def _capture_exception(exc, mechanism_type="asgi"): - # type: (Any, str) -> None +def _capture_exception(exc: Any, mechanism_type: str = "asgi") -> None: event, hint = event_from_exception( exc, @@ -68,8 +72,7 @@ def _capture_exception(exc, mechanism_type="asgi"): sentry_sdk.capture_event(event, hint=hint) -def _looks_like_asgi3(app): - # type: (Any) -> bool +def _looks_like_asgi3(app: Any) -> bool: """ Try to figure out if an application object supports ASGI3. @@ -96,14 +99,13 @@ class SentryAsgiMiddleware: def __init__( self, - app, # type: Any - unsafe_context_data=False, # type: bool - transaction_style="endpoint", # type: str - mechanism_type="asgi", # type: str - span_origin="manual", # type: str - http_methods_to_capture=DEFAULT_HTTP_METHODS_TO_CAPTURE, # type: Tuple[str, ...] - ): - # type: (...) -> None + app: Any, + unsafe_context_data: bool = False, + transaction_style: str = "endpoint", + mechanism_type: str = "asgi", + span_origin: Optional[str] = None, + http_methods_to_capture: Tuple[str, ...] = DEFAULT_HTTP_METHODS_TO_CAPTURE, + ) -> None: """ Instrument an ASGI application with Sentry. Provides HTTP/websocket data to sent events and basic handling for exceptions bubbling up @@ -141,56 +143,79 @@ def __init__( self.http_methods_to_capture = http_methods_to_capture if _looks_like_asgi3(app): - self.__call__ = self._run_asgi3 # type: Callable[..., Any] + self.__call__: Callable[..., Any] = self._run_asgi3 else: self.__call__ = self._run_asgi2 - def _capture_lifespan_exception(self, exc): - # type: (Exception) -> None + def _capture_lifespan_exception(self, exc: Exception) -> None: """Capture exceptions raise in application lifespan handlers. The separate function is needed to support overriding in derived integrations that use different catching mechanisms. """ return _capture_exception(exc=exc, mechanism_type=self.mechanism_type) - def _capture_request_exception(self, exc): - # type: (Exception) -> None + def _capture_request_exception(self, exc: Exception) -> None: """Capture exceptions raised in incoming request handlers. The separate function is needed to support overriding in derived integrations that use different catching mechanisms. 
""" return _capture_exception(exc=exc, mechanism_type=self.mechanism_type) - def _run_asgi2(self, scope): - # type: (Any) -> Any - async def inner(receive, send): - # type: (Any, Any) -> Any + def _run_asgi2(self, scope: Any) -> Any: + async def inner(receive: Any, send: Any) -> Any: return await self._run_app(scope, receive, send, asgi_version=2) return inner - async def _run_asgi3(self, scope, receive, send): - # type: (Any, Any, Any) -> Any + async def _run_asgi3(self, scope: Any, receive: Any, send: Any) -> Any: return await self._run_app(scope, receive, send, asgi_version=3) - async def _run_app(self, scope, receive, send, asgi_version): - # type: (Any, Any, Any, int) -> Any + async def _run_original_app( + self, + scope: Any, + receive: Any, + send: Any, + asgi_version: Any, + is_lifespan: int = False, + ) -> Any: + try: + if asgi_version == 2: + return await self.app(scope)(receive, send) + else: + return await self.app(scope, receive, send) + + except Exception as exc: + if is_lifespan: + self._capture_lifespan_exception(exc) + else: + self._capture_request_exception(exc) + raise exc from None + + async def _run_app( + self, scope: Any, receive: Any, send: Any, asgi_version: int + ) -> Any: is_recursive_asgi_middleware = _asgi_middleware_applied.get(False) is_lifespan = scope["type"] == "lifespan" if is_recursive_asgi_middleware or is_lifespan: - try: - if asgi_version == 2: - return await self.app(scope)(receive, send) - else: - return await self.app(scope, receive, send) - - except Exception as exc: - self._capture_lifespan_exception(exc) - raise exc from None + return await self._run_original_app( + scope, receive, send, asgi_version, is_lifespan + ) _asgi_middleware_applied.set(True) try: with sentry_sdk.isolation_scope() as sentry_scope: + ( + transaction_name, + transaction_source, + ) = self._get_transaction_name_and_source( + self.transaction_style, + scope, + ) + sentry_scope.set_transaction_name( + transaction_name, + source=transaction_source, + ) + with track_session(sentry_scope, session_mode="request"): sentry_scope.clear_breadcrumbs() sentry_scope._name = "asgi" @@ -198,87 +223,58 @@ async def _run_app(self, scope, receive, send, asgi_version): sentry_scope.add_event_processor(processor) ty = scope["type"] - ( - transaction_name, - transaction_source, - ) = self._get_transaction_name_and_source( - self.transaction_style, - scope, - ) method = scope.get("method", "").upper() - transaction = None - if ty in ("http", "websocket"): - if ty == "websocket" or method in self.http_methods_to_capture: - transaction = continue_trace( - _get_headers(scope), - op="{}.server".format(ty), - name=transaction_name, - source=transaction_source, - origin=self.span_origin, - ) - logger.debug( - "[ASGI] Created transaction (continuing trace): %s", - transaction, - ) - else: - transaction = Transaction( - op=OP.HTTP_SERVER, + should_trace = ty == "websocket" or ( + ty == "http" and method in self.http_methods_to_capture + ) + if not should_trace: + return await self._run_original_app( + scope, receive, send, asgi_version + ) + + with sentry_sdk.continue_trace(_get_headers(scope)): + with sentry_sdk.start_span( + op=( + OP.WEBSOCKET_SERVER + if ty == "websocket" + else OP.HTTP_SERVER + ), name=transaction_name, source=transaction_source, origin=self.span_origin, - ) - logger.debug( - "[ASGI] Created transaction (new): %s", transaction - ) - - if transaction: - transaction.set_tag("asgi.type", ty) - logger.debug( - "[ASGI] Set transaction name and source on transaction: '%s' / '%s'", - 
transaction.name, - transaction.source, - ) - - with ( - sentry_sdk.start_transaction( - transaction, - custom_sampling_context={"asgi_scope": scope}, - ) - if transaction is not None - else nullcontext() - ): - logger.debug("[ASGI] Started transaction: %s", transaction) - try: - - async def _sentry_wrapped_send(event): - # type: (Dict[str, Any]) -> Any - if transaction is not None: - is_http_response = ( - event.get("type") == "http.response.start" - and "status" in event - ) - if is_http_response: - transaction.set_http_status(event["status"]) + attributes=_prepopulate_attributes(scope), + ) as span: + if span is not None: + logger.debug("[ASGI] Started transaction: %s", span) + span.set_tag("asgi.type", ty) + + async def _sentry_wrapped_send( + event: Dict[str, Any], + ) -> Any: + is_http_response = ( + event.get("type") == "http.response.start" + and span is not None + and "status" in event + ) + if is_http_response: + span.set_http_status(event["status"]) return await send(event) - if asgi_version == 2: - return await self.app(scope)( - receive, _sentry_wrapped_send - ) - else: - return await self.app( - scope, receive, _sentry_wrapped_send - ) - except Exception as exc: - self._capture_request_exception(exc) - raise exc from None + return await self._run_original_app( + scope, + receive, + _sentry_wrapped_send, + asgi_version, + is_lifespan, + ) finally: _asgi_middleware_applied.set(False) - def event_processor(self, event, hint, asgi_scope): - # type: (Event, Hint, Any) -> Optional[Event] + def event_processor( + self, event: Event, hint: Hint, asgi_scope: Any + ) -> Optional[Event]: request_data = event.get("request", {}) request_data.update(_get_request_data(asgi_scope)) event["request"] = deepcopy(request_data) @@ -317,11 +313,11 @@ def event_processor(self, event, hint, asgi_scope): # data to your liking it's recommended to use the `before_send` callback # for that. 
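As the comment above says, the request data this event processor attaches is best adjusted through the `before_send` callback rather than by modifying the middleware. A minimal sketch, assuming the event shape produced by this integration (the DSN and the choice to drop cookies are illustrative):

    import sentry_sdk

    def scrub_request(event, hint):
        # Remove cookies that the ASGI event processor may have attached;
        # returning the modified event lets it continue on to Sentry.
        event.get("request", {}).pop("cookies", None)
        return event

    sentry_sdk.init(dsn="https://public@example.ingest.sentry.io/1", before_send=scrub_request)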
- def _get_transaction_name_and_source(self, transaction_style, asgi_scope): - # type: (SentryAsgiMiddleware, str, Any) -> Tuple[str, str] + def _get_transaction_name_and_source( + self: SentryAsgiMiddleware, transaction_style: str, asgi_scope: Any + ) -> Tuple[str, str]: name = None source = SOURCE_FOR_STYLE[transaction_style] - ty = asgi_scope.get("type") if transaction_style == "endpoint": endpoint = asgi_scope.get("endpoint") @@ -331,7 +327,7 @@ def _get_transaction_name_and_source(self, transaction_style, asgi_scope): if endpoint: name = transaction_from_function(endpoint) or "" else: - name = _get_url(asgi_scope, "http" if ty == "http" else "ws", host=None) + name = _get_url(asgi_scope) source = TransactionSource.URL elif transaction_style == "url": @@ -343,7 +339,7 @@ def _get_transaction_name_and_source(self, transaction_style, asgi_scope): if path is not None: name = path else: - name = _get_url(asgi_scope, "http" if ty == "http" else "ws", host=None) + name = _get_url(asgi_scope) source = TransactionSource.URL if name is None: @@ -352,3 +348,36 @@ def _get_transaction_name_and_source(self, transaction_style, asgi_scope): return name, source return name, source + + +def _prepopulate_attributes(scope: Any) -> dict[str, Any]: + """Unpack ASGI scope into serializable OTel attributes.""" + scope = scope or {} + + attributes = {} + for attr, key in ASGI_SCOPE_PROPERTY_TO_ATTRIBUTE.items(): + if scope.get(attr): + attributes[key] = scope[attr] + + for attr in ("client", "server"): + if scope.get(attr): + try: + host, port = scope[attr] + attributes[f"{attr}.address"] = host + if port is not None: + attributes[f"{attr}.port"] = port + except Exception: + pass + + with capture_internal_exceptions(): + full_url = _get_url(scope) + query = _get_query(scope) + if query: + attributes["url.query"] = query + full_url = f"{full_url}?{query}" + + attributes["url.full"] = full_url + + attributes.update(_request_headers_to_span_attributes(_get_headers(scope))) + + return attributes diff --git a/sentry_sdk/integrations/asyncio.py b/sentry_sdk/integrations/asyncio.py index ae580ca038..719cbba1a8 100644 --- a/sentry_sdk/integrations/asyncio.py +++ b/sentry_sdk/integrations/asyncio.py @@ -1,3 +1,4 @@ +from __future__ import annotations import sys import sentry_sdk @@ -11,7 +12,7 @@ except ImportError: raise DidNotEnable("asyncio not available") -from typing import cast, TYPE_CHECKING +from typing import TYPE_CHECKING if TYPE_CHECKING: from typing import Any @@ -20,8 +21,7 @@ from sentry_sdk._types import ExcInfo -def get_name(coro): - # type: (Any) -> str +def get_name(coro: Any) -> str: return ( getattr(coro, "__qualname__", None) or getattr(coro, "__name__", None) @@ -29,18 +29,19 @@ def get_name(coro): ) -def patch_asyncio(): - # type: () -> None +def patch_asyncio() -> None: orig_task_factory = None try: loop = asyncio.get_running_loop() orig_task_factory = loop.get_task_factory() - def _sentry_task_factory(loop, coro, **kwargs): - # type: (asyncio.AbstractEventLoop, Coroutine[Any, Any, Any], Any) -> asyncio.Future[Any] + def _sentry_task_factory( + loop: asyncio.AbstractEventLoop, + coro: Coroutine[Any, Any, Any], + **kwargs: Any, + ) -> asyncio.Future[Any]: - async def _task_with_sentry_span_creation(): - # type: () -> Any + async def _task_with_sentry_span_creation() -> Any: result = None with sentry_sdk.isolation_scope(): @@ -48,6 +49,7 @@ async def _task_with_sentry_span_creation(): op=OP.FUNCTION, name=get_name(coro), origin=AsyncioIntegration.origin, + only_as_child_span=True, ): try: result 
= await coro @@ -78,9 +80,8 @@ async def _task_with_sentry_span_creation(): # Set the task name to include the original coroutine's name try: - cast("asyncio.Task[Any]", task).set_name( - f"{get_name(coro)} (Sentry-wrapped)" - ) + if isinstance(task, asyncio.Task): + task.set_name(f"{get_name(coro)} (Sentry-wrapped)") except AttributeError: # set_name might not be available in all Python versions pass @@ -99,8 +100,7 @@ async def _task_with_sentry_span_creation(): ) -def _capture_exception(): - # type: () -> ExcInfo +def _capture_exception() -> ExcInfo: exc_info = sys.exc_info() client = sentry_sdk.get_client() @@ -122,6 +122,5 @@ class AsyncioIntegration(Integration): origin = f"auto.function.{identifier}" @staticmethod - def setup_once(): - # type: () -> None + def setup_once() -> None: patch_asyncio() diff --git a/sentry_sdk/integrations/asyncpg.py b/sentry_sdk/integrations/asyncpg.py index b6b53f4668..2b492ba4cc 100644 --- a/sentry_sdk/integrations/asyncpg.py +++ b/sentry_sdk/integrations/asyncpg.py @@ -1,6 +1,6 @@ from __future__ import annotations import contextlib -from typing import Any, TypeVar, Callable, Awaitable, Iterator +from typing import Any, TypeVar, Callable, Awaitable, Iterator, Optional import sentry_sdk from sentry_sdk.consts import OP, SPANDATA @@ -8,6 +8,7 @@ from sentry_sdk.tracing import Span from sentry_sdk.tracing_utils import add_query_source, record_sql_queries from sentry_sdk.utils import ( + _serialize_span_attribute, ensure_integration_enabled, parse_version, capture_internal_exceptions, @@ -38,7 +39,6 @@ def setup_once() -> None: asyncpg.Connection.execute = _wrap_execute( asyncpg.Connection.execute, ) - asyncpg.Connection._execute = _wrap_connection_method( asyncpg.Connection._execute ) @@ -78,8 +78,8 @@ async def _inner(*args: Any, **kwargs: Any) -> T: ) as span: res = await f(*args, **kwargs) - with capture_internal_exceptions(): - add_query_source(span) + with capture_internal_exceptions(): + add_query_source(span) return res @@ -121,10 +121,13 @@ def _wrap_connection_method( async def _inner(*args: Any, **kwargs: Any) -> T: if sentry_sdk.get_client().get_integration(AsyncPGIntegration) is None: return await f(*args, **kwargs) + query = args[1] params_list = args[2] if len(args) > 2 else None + with _record(None, query, params_list, executemany=executemany) as span: - _set_db_data(span, args[0]) + data = _get_db_data(conn=args[0]) + _set_on_span(span, data) res = await f(*args, **kwargs) return res @@ -144,9 +147,10 @@ def _inner(*args: Any, **kwargs: Any) -> T: # noqa: N807 params_list, executemany=False, ) as span: - _set_db_data(span, args[0]) + data = _get_db_data(conn=args[0]) + _set_on_span(span, data) res = f(*args, **kwargs) - span.set_data("db.cursor", res) + span.set_attribute("db.cursor", _serialize_span_attribute(res)) return res @@ -158,29 +162,24 @@ async def _inner(*args: Any, **kwargs: Any) -> T: if sentry_sdk.get_client().get_integration(AsyncPGIntegration) is None: return await f(*args, **kwargs) - user = kwargs["params"].user - database = kwargs["params"].database - with sentry_sdk.start_span( op=OP.DB, name="connect", origin=AsyncPGIntegration.origin, + only_as_child_span=True, ) as span: - span.set_data(SPANDATA.DB_SYSTEM, "postgresql") - addr = kwargs.get("addr") - if addr: - try: - span.set_data(SPANDATA.SERVER_ADDRESS, addr[0]) - span.set_data(SPANDATA.SERVER_PORT, addr[1]) - except IndexError: - pass - span.set_data(SPANDATA.DB_NAME, database) - span.set_data(SPANDATA.DB_USER, user) + data = _get_db_data( + 
addr=kwargs.get("addr"), + database=kwargs["params"].database, + user=kwargs["params"].user, + ) + _set_on_span(span, data) with capture_internal_exceptions(): sentry_sdk.add_breadcrumb( - message="connect", category="query", data=span._data + message="connect", category="query", data=data ) + res = await f(*args, **kwargs) return res @@ -188,21 +187,37 @@ async def _inner(*args: Any, **kwargs: Any) -> T: return _inner -def _set_db_data(span: Span, conn: Any) -> None: - span.set_data(SPANDATA.DB_SYSTEM, "postgresql") +def _get_db_data( + conn: Any = None, + addr: Optional[tuple[str, ...]] = None, + database: Optional[str] = None, + user: Optional[str] = None, +) -> dict[str, str]: + if conn is not None: + addr = conn._addr + database = conn._params.database + user = conn._params.user + + data = { + SPANDATA.DB_SYSTEM: "postgresql", + } - addr = conn._addr if addr: try: - span.set_data(SPANDATA.SERVER_ADDRESS, addr[0]) - span.set_data(SPANDATA.SERVER_PORT, addr[1]) + data[SPANDATA.SERVER_ADDRESS] = addr[0] + data[SPANDATA.SERVER_PORT] = addr[1] except IndexError: pass - database = conn._params.database if database: - span.set_data(SPANDATA.DB_NAME, database) + data[SPANDATA.DB_NAME] = database - user = conn._params.user if user: - span.set_data(SPANDATA.DB_USER, user) + data[SPANDATA.DB_USER] = user + + return data + + +def _set_on_span(span: Span, data: dict[str, Any]) -> None: + for key, value in data.items(): + span.set_attribute(key, value) diff --git a/sentry_sdk/integrations/atexit.py b/sentry_sdk/integrations/atexit.py index dfc6d08e1a..de60d15dcc 100644 --- a/sentry_sdk/integrations/atexit.py +++ b/sentry_sdk/integrations/atexit.py @@ -1,3 +1,4 @@ +from __future__ import annotations import os import sys import atexit @@ -12,15 +13,13 @@ from typing import Optional -def default_callback(pending, timeout): - # type: (int, int) -> None +def default_callback(pending: int, timeout: int) -> None: """This is the default shutdown callback that is set on the options. It prints out a message to stderr that informs the user that some events are still pending and the process is waiting for them to flush out. 
""" - def echo(msg): - # type: (str) -> None + def echo(msg: str) -> None: sys.stderr.write(msg + "\n") echo("Sentry is attempting to send %i pending events" % pending) @@ -32,18 +31,15 @@ def echo(msg): class AtexitIntegration(Integration): identifier = "atexit" - def __init__(self, callback=None): - # type: (Optional[Any]) -> None + def __init__(self, callback: Optional[Any] = None) -> None: if callback is None: callback = default_callback self.callback = callback @staticmethod - def setup_once(): - # type: () -> None + def setup_once() -> None: @atexit.register - def _shutdown(): - # type: () -> None + def _shutdown() -> None: client = sentry_sdk.get_client() integration = client.get_integration(AtexitIntegration) diff --git a/sentry_sdk/integrations/aws_lambda.py b/sentry_sdk/integrations/aws_lambda.py index 4990fd6e6a..7d39cc3a78 100644 --- a/sentry_sdk/integrations/aws_lambda.py +++ b/sentry_sdk/integrations/aws_lambda.py @@ -1,3 +1,4 @@ +from __future__ import annotations import functools import json import re @@ -5,9 +6,9 @@ from copy import deepcopy from datetime import datetime, timedelta, timezone from os import environ +from urllib.parse import urlencode import sentry_sdk -from sentry_sdk.api import continue_trace from sentry_sdk.consts import OP from sentry_sdk.scope import should_send_default_pii from sentry_sdk.tracing import TransactionSource @@ -21,7 +22,10 @@ reraise, ) from sentry_sdk.integrations import Integration -from sentry_sdk.integrations._wsgi_common import _filter_headers +from sentry_sdk.integrations._wsgi_common import ( + _filter_headers, + _request_headers_to_span_attributes, +) from typing import TYPE_CHECKING @@ -40,11 +44,20 @@ MILLIS_TO_SECONDS = 1000.0 -def _wrap_init_error(init_error): - # type: (F) -> F +EVENT_TO_ATTRIBUTES = { + "httpMethod": "http.request.method", + "queryStringParameters": "url.query", + "path": "url.path", +} + +CONTEXT_TO_ATTRIBUTES = { + "function_name": "faas.name", +} + + +def _wrap_init_error(init_error: F) -> F: @ensure_integration_enabled(AwsLambdaIntegration, init_error) - def sentry_init_error(*args, **kwargs): - # type: (*Any, **Any) -> Any + def sentry_init_error(*args: Any, **kwargs: Any) -> Any: client = sentry_sdk.get_client() with capture_internal_exceptions(): @@ -72,11 +85,11 @@ def sentry_init_error(*args, **kwargs): return sentry_init_error # type: ignore -def _wrap_handler(handler): - # type: (F) -> F +def _wrap_handler(handler: F) -> F: @functools.wraps(handler) - def sentry_handler(aws_event, aws_context, *args, **kwargs): - # type: (Any, Any, *Any, **Any) -> Any + def sentry_handler( + aws_event: Any, aws_context: Any, *args: Any, **kwargs: Any + ) -> Any: # Per https://docs.aws.amazon.com/lambda/latest/dg/python-handler.html, # `event` here is *likely* a dictionary, but also might be a number of @@ -110,6 +123,9 @@ def sentry_handler(aws_event, aws_context, *args, **kwargs): configured_time = aws_context.get_remaining_time_in_millis() with sentry_sdk.isolation_scope() as scope: + scope.set_transaction_name( + aws_context.function_name, source=TransactionSource.COMPONENT + ) timeout_thread = None with capture_internal_exceptions(): scope.clear_breadcrumbs() @@ -149,40 +165,33 @@ def sentry_handler(aws_event, aws_context, *args, **kwargs): if not isinstance(headers, dict): headers = {} - transaction = continue_trace( - headers, - op=OP.FUNCTION_AWS, - name=aws_context.function_name, - source=TransactionSource.COMPONENT, - origin=AwsLambdaIntegration.origin, - ) - with sentry_sdk.start_transaction( - 
transaction, - custom_sampling_context={ - "aws_event": aws_event, - "aws_context": aws_context, - }, - ): - try: - return handler(aws_event, aws_context, *args, **kwargs) - except Exception: - exc_info = sys.exc_info() - sentry_event, hint = event_from_exception( - exc_info, - client_options=client.options, - mechanism={"type": "aws_lambda", "handled": False}, - ) - sentry_sdk.capture_event(sentry_event, hint=hint) - reraise(*exc_info) - finally: - if timeout_thread: - timeout_thread.stop() + with sentry_sdk.continue_trace(headers): + with sentry_sdk.start_span( + op=OP.FUNCTION_AWS, + name=aws_context.function_name, + source=TransactionSource.COMPONENT, + origin=AwsLambdaIntegration.origin, + attributes=_prepopulate_attributes(request_data, aws_context), + ): + try: + return handler(aws_event, aws_context, *args, **kwargs) + except Exception: + exc_info = sys.exc_info() + sentry_event, hint = event_from_exception( + exc_info, + client_options=client.options, + mechanism={"type": "aws_lambda", "handled": False}, + ) + sentry_sdk.capture_event(sentry_event, hint=hint) + reraise(*exc_info) + finally: + if timeout_thread: + timeout_thread.stop() return sentry_handler # type: ignore -def _drain_queue(): - # type: () -> None +def _drain_queue() -> None: with capture_internal_exceptions(): client = sentry_sdk.get_client() integration = client.get_integration(AwsLambdaIntegration) @@ -196,13 +205,11 @@ class AwsLambdaIntegration(Integration): identifier = "aws_lambda" origin = f"auto.function.{identifier}" - def __init__(self, timeout_warning=False): - # type: (bool) -> None + def __init__(self, timeout_warning: bool = False) -> None: self.timeout_warning = timeout_warning @staticmethod - def setup_once(): - # type: () -> None + def setup_once() -> None: lambda_bootstrap = get_lambda_bootstrap() if not lambda_bootstrap: @@ -219,81 +226,45 @@ def setup_once(): ) return - pre_37 = hasattr(lambda_bootstrap, "handle_http_request") # Python 3.6 - - if pre_37: - old_handle_event_request = lambda_bootstrap.handle_event_request - - def sentry_handle_event_request(request_handler, *args, **kwargs): - # type: (Any, *Any, **Any) -> Any - request_handler = _wrap_handler(request_handler) - return old_handle_event_request(request_handler, *args, **kwargs) + lambda_bootstrap.LambdaRuntimeClient.post_init_error = _wrap_init_error( + lambda_bootstrap.LambdaRuntimeClient.post_init_error + ) - lambda_bootstrap.handle_event_request = sentry_handle_event_request + old_handle_event_request = lambda_bootstrap.handle_event_request - old_handle_http_request = lambda_bootstrap.handle_http_request - - def sentry_handle_http_request(request_handler, *args, **kwargs): - # type: (Any, *Any, **Any) -> Any - request_handler = _wrap_handler(request_handler) - return old_handle_http_request(request_handler, *args, **kwargs) - - lambda_bootstrap.handle_http_request = sentry_handle_http_request - - # Patch to_json to drain the queue. 
This should work even when the - # SDK is initialized inside of the handler - - old_to_json = lambda_bootstrap.to_json - - def sentry_to_json(*args, **kwargs): - # type: (*Any, **Any) -> Any - _drain_queue() - return old_to_json(*args, **kwargs) - - lambda_bootstrap.to_json = sentry_to_json - else: - lambda_bootstrap.LambdaRuntimeClient.post_init_error = _wrap_init_error( - lambda_bootstrap.LambdaRuntimeClient.post_init_error - ) - - old_handle_event_request = lambda_bootstrap.handle_event_request - - def sentry_handle_event_request( # type: ignore + def sentry_handle_event_request( # type: ignore + lambda_runtime_client, request_handler, *args, **kwargs + ): + request_handler = _wrap_handler(request_handler) + return old_handle_event_request( lambda_runtime_client, request_handler, *args, **kwargs - ): - request_handler = _wrap_handler(request_handler) - return old_handle_event_request( - lambda_runtime_client, request_handler, *args, **kwargs - ) + ) - lambda_bootstrap.handle_event_request = sentry_handle_event_request + lambda_bootstrap.handle_event_request = sentry_handle_event_request - # Patch the runtime client to drain the queue. This should work - # even when the SDK is initialized inside of the handler + # Patch the runtime client to drain the queue. This should work + # even when the SDK is initialized inside of the handler - def _wrap_post_function(f): - # type: (F) -> F - def inner(*args, **kwargs): - # type: (*Any, **Any) -> Any - _drain_queue() - return f(*args, **kwargs) + def _wrap_post_function(f: F) -> F: + def inner(*args: Any, **kwargs: Any) -> Any: + _drain_queue() + return f(*args, **kwargs) - return inner # type: ignore + return inner # type: ignore - lambda_bootstrap.LambdaRuntimeClient.post_invocation_result = ( - _wrap_post_function( - lambda_bootstrap.LambdaRuntimeClient.post_invocation_result - ) + lambda_bootstrap.LambdaRuntimeClient.post_invocation_result = ( + _wrap_post_function( + lambda_bootstrap.LambdaRuntimeClient.post_invocation_result ) - lambda_bootstrap.LambdaRuntimeClient.post_invocation_error = ( - _wrap_post_function( - lambda_bootstrap.LambdaRuntimeClient.post_invocation_error - ) + ) + lambda_bootstrap.LambdaRuntimeClient.post_invocation_error = ( + _wrap_post_function( + lambda_bootstrap.LambdaRuntimeClient.post_invocation_error ) + ) -def get_lambda_bootstrap(): - # type: () -> Optional[Any] +def get_lambda_bootstrap() -> Optional[Any]: # Python 3.7: If the bootstrap module is *already imported*, it is the # one we actually want to use (no idea what's in __main__) @@ -329,12 +300,14 @@ def get_lambda_bootstrap(): return None -def _make_request_event_processor(aws_event, aws_context, configured_timeout): - # type: (Any, Any, Any) -> EventProcessor +def _make_request_event_processor( + aws_event: Any, aws_context: Any, configured_timeout: Any +) -> EventProcessor: start_time = datetime.now(timezone.utc) - def event_processor(sentry_event, hint, start_time=start_time): - # type: (Event, Hint, datetime) -> Optional[Event] + def event_processor( + sentry_event: Event, hint: Hint, start_time: datetime = start_time + ) -> Optional[Event]: remaining_time_in_milis = aws_context.get_remaining_time_in_millis() exec_duration = configured_timeout - remaining_time_in_milis @@ -362,7 +335,7 @@ def event_processor(sentry_event, hint, start_time=start_time): request["url"] = _get_url(aws_event, aws_context) if "queryStringParameters" in aws_event: - request["query_string"] = aws_event["queryStringParameters"] + request["query_string"] = 
urlencode(aws_event["queryStringParameters"]) if "headers" in aws_event: request["headers"] = _filter_headers(aws_event["headers"]) @@ -397,12 +370,13 @@ def event_processor(sentry_event, hint, start_time=start_time): return event_processor -def _get_url(aws_event, aws_context): - # type: (Any, Any) -> str +def _get_url(aws_event: Any, aws_context: Any) -> str: path = aws_event.get("path", None) headers = aws_event.get("headers") - if headers is None: + # Some AWS Services (ie. EventBridge) set headers as a list + # or None, so we must ensure it is a dict + if not isinstance(headers, dict): headers = {} host = headers.get("Host", None) @@ -412,8 +386,7 @@ def _get_url(aws_event, aws_context): return "awslambda:///{}".format(aws_context.function_name) -def _get_cloudwatch_logs_url(aws_context, start_time): - # type: (Any, datetime) -> str +def _get_cloudwatch_logs_url(aws_context: Any, start_time: datetime) -> str: """ Generates a CloudWatchLogs console URL based on the context object @@ -444,8 +417,7 @@ def _get_cloudwatch_logs_url(aws_context, start_time): return url -def _parse_formatted_traceback(formatted_tb): - # type: (list[str]) -> list[dict[str, Any]] +def _parse_formatted_traceback(formatted_tb: list[str]) -> list[dict[str, Any]]: frames = [] for frame in formatted_tb: match = re.match(r'File "(.+)", line (\d+), in (.+)', frame.strip()) @@ -466,8 +438,7 @@ def _parse_formatted_traceback(formatted_tb): return frames -def _event_from_error_json(error_json): - # type: (dict[str, Any]) -> Event +def _event_from_error_json(error_json: dict[str, Any]) -> Event: """ Converts the error JSON from AWS Lambda into a Sentry error event. This is not a full fletched event, but better than nothing. @@ -475,7 +446,7 @@ def _event_from_error_json(error_json): This is an example of where AWS creates the error JSON: https://github.com/aws/aws-lambda-python-runtime-interface-client/blob/2.2.1/awslambdaric/bootstrap.py#L479 """ - event = { + event: Event = { "level": "error", "exception": { "values": [ @@ -494,6 +465,42 @@ def _event_from_error_json(error_json): } ], }, - } # type: Event + } return event + + +def _prepopulate_attributes(aws_event: Any, aws_context: Any) -> dict[str, Any]: + attributes = { + "cloud.provider": "aws", + } + + for prop, attr in EVENT_TO_ATTRIBUTES.items(): + if aws_event.get(prop) is not None: + if prop == "queryStringParameters": + attributes[attr] = urlencode(aws_event[prop]) + else: + attributes[attr] = aws_event[prop] + + for prop, attr in CONTEXT_TO_ATTRIBUTES.items(): + if getattr(aws_context, prop, None) is not None: + attributes[attr] = getattr(aws_context, prop) + + url = _get_url(aws_event, aws_context) + if url: + if aws_event.get("queryStringParameters"): + url += f"?{urlencode(aws_event['queryStringParameters'])}" + attributes["url.full"] = url + + headers = {} + if aws_event.get("headers") and isinstance(aws_event["headers"], dict): + headers = aws_event["headers"] + + if headers.get("X-Forwarded-Proto"): + attributes["network.protocol.name"] = headers["X-Forwarded-Proto"] + if headers.get("Host"): + attributes["server.address"] = headers["Host"] + + attributes.update(_request_headers_to_span_attributes(headers)) + + return attributes diff --git a/sentry_sdk/integrations/beam.py b/sentry_sdk/integrations/beam.py index a2e4553f5a..fd37111be2 100644 --- a/sentry_sdk/integrations/beam.py +++ b/sentry_sdk/integrations/beam.py @@ -1,3 +1,4 @@ +from __future__ import annotations import sys import types from functools import wraps @@ -35,8 +36,7 @@ class 
BeamIntegration(Integration): identifier = "beam" @staticmethod - def setup_once(): - # type: () -> None + def setup_once() -> None: from apache_beam.transforms.core import DoFn, ParDo # type: ignore ignore_logger("root") @@ -52,8 +52,7 @@ def setup_once(): old_init = ParDo.__init__ - def sentry_init_pardo(self, fn, *args, **kwargs): - # type: (ParDo, Any, *Any, **Any) -> Any + def sentry_init_pardo(self: ParDo, fn: Any, *args: Any, **kwargs: Any) -> Any: # Do not monkey patch init twice if not getattr(self, "_sentry_is_patched", False): for func_name in function_patches: @@ -79,14 +78,12 @@ def sentry_init_pardo(self, fn, *args, **kwargs): ParDo.__init__ = sentry_init_pardo -def _wrap_inspect_call(cls, func_name): - # type: (Any, Any) -> Any +def _wrap_inspect_call(cls: Any, func_name: Any) -> Any: if not hasattr(cls, func_name): return None - def _inspect(self): - # type: (Any) -> Any + def _inspect(self: Any) -> Any: """ Inspect function overrides the way Beam gets argspec. """ @@ -113,15 +110,13 @@ def _inspect(self): return _inspect -def _wrap_task_call(func): - # type: (F) -> F +def _wrap_task_call(func: F) -> F: """ Wrap task call with a try catch to get exceptions. """ @wraps(func) - def _inner(*args, **kwargs): - # type: (*Any, **Any) -> Any + def _inner(*args: Any, **kwargs: Any) -> Any: try: gen = func(*args, **kwargs) except Exception: @@ -136,8 +131,7 @@ def _inner(*args, **kwargs): @ensure_integration_enabled(BeamIntegration) -def _capture_exception(exc_info): - # type: (ExcInfo) -> None +def _capture_exception(exc_info: ExcInfo) -> None: """ Send Beam exception to Sentry. """ @@ -151,8 +145,7 @@ def _capture_exception(exc_info): sentry_sdk.capture_event(event, hint=hint) -def raise_exception(): - # type: () -> None +def raise_exception() -> None: """ Raise an exception. """ @@ -162,8 +155,7 @@ def raise_exception(): reraise(*exc_info) -def _wrap_generator_call(gen): - # type: (Iterator[T]) -> Iterator[T] +def _wrap_generator_call(gen: Iterator[T]) -> Iterator[T]: """ Wrap the generator to handle any failures. 
""" diff --git a/sentry_sdk/integrations/boto3.py b/sentry_sdk/integrations/boto3.py index 0207341f1b..876ffe4399 100644 --- a/sentry_sdk/integrations/boto3.py +++ b/sentry_sdk/integrations/boto3.py @@ -1,9 +1,9 @@ +from __future__ import annotations from functools import partial import sentry_sdk from sentry_sdk.consts import OP, SPANDATA from sentry_sdk.integrations import _check_minimum_version, Integration, DidNotEnable -from sentry_sdk.tracing import Span from sentry_sdk.utils import ( capture_internal_exceptions, ensure_integration_enabled, @@ -19,6 +19,8 @@ from typing import Optional from typing import Type + from sentry_sdk.tracing import Span + try: from botocore import __version__ as BOTOCORE_VERSION # type: ignore from botocore.client import BaseClient # type: ignore @@ -33,15 +35,15 @@ class Boto3Integration(Integration): origin = f"auto.http.{identifier}" @staticmethod - def setup_once(): - # type: () -> None + def setup_once() -> None: version = parse_version(BOTOCORE_VERSION) _check_minimum_version(Boto3Integration, version, "botocore") orig_init = BaseClient.__init__ - def sentry_patched_init(self, *args, **kwargs): - # type: (Type[BaseClient], *Any, **Any) -> None + def sentry_patched_init( + self: Type[BaseClient], *args: Any, **kwargs: Any + ) -> None: orig_init(self, *args, **kwargs) meta = self.meta service_id = meta.service_model.service_id.hyphenize() @@ -56,24 +58,31 @@ def sentry_patched_init(self, *args, **kwargs): @ensure_integration_enabled(Boto3Integration) -def _sentry_request_created(service_id, request, operation_name, **kwargs): - # type: (str, AWSRequest, str, **Any) -> None +def _sentry_request_created( + service_id: str, request: AWSRequest, operation_name: str, **kwargs: Any +) -> None: description = "aws.%s.%s" % (service_id, operation_name) span = sentry_sdk.start_span( op=OP.HTTP_CLIENT, name=description, origin=Boto3Integration.origin, + only_as_child_span=True, ) + data = { + SPANDATA.HTTP_METHOD: request.method, + } with capture_internal_exceptions(): parsed_url = parse_url(request.url, sanitize=False) - span.set_data("aws.request.url", parsed_url.url) - span.set_data(SPANDATA.HTTP_QUERY, parsed_url.query) - span.set_data(SPANDATA.HTTP_FRAGMENT, parsed_url.fragment) + data["aws.request.url"] = parsed_url.url + data[SPANDATA.HTTP_QUERY] = parsed_url.query + data[SPANDATA.HTTP_FRAGMENT] = parsed_url.fragment + + for key, value in data.items(): + span.set_attribute(key, value) span.set_tag("aws.service_id", service_id) span.set_tag("aws.operation_name", operation_name) - span.set_data(SPANDATA.HTTP_METHOD, request.method) # We do it in order for subsequent http calls/retries be # attached to this span. @@ -82,32 +91,41 @@ def _sentry_request_created(service_id, request, operation_name, **kwargs): # request.context is an open-ended data-structure # where we can add anything useful in request life cycle. request.context["_sentrysdk_span"] = span + request.context["_sentrysdk_span_data"] = data -def _sentry_after_call(context, parsed, **kwargs): - # type: (Dict[str, Any], Dict[str, Any], **Any) -> None - span = context.pop("_sentrysdk_span", None) # type: Optional[Span] +def _sentry_after_call( + context: Dict[str, Any], parsed: Dict[str, Any], **kwargs: Any +) -> None: + span: Optional[Span] = context.pop("_sentrysdk_span", None) # Span could be absent if the integration is disabled. 
if span is None: return - span.__exit__(None, None, None) + + span_data = context.pop("_sentrysdk_span_data", {}) + + sentry_sdk.add_breadcrumb( + type="http", + category="httplib", + data=span_data, + ) body = parsed.get("Body") if not isinstance(body, StreamingBody): + span.__exit__(None, None, None) return - streaming_span = span.start_child( + streaming_span = sentry_sdk.start_span( op=OP.HTTP_CLIENT_STREAM, - name=span.description, + name=span.name, origin=Boto3Integration.origin, + only_as_child_span=True, ) orig_read = body.read - orig_close = body.close - def sentry_streaming_body_read(*args, **kwargs): - # type: (*Any, **Any) -> bytes + def sentry_streaming_body_read(*args: Any, **kwargs: Any) -> bytes: try: ret = orig_read(*args, **kwargs) if not ret: @@ -119,19 +137,32 @@ def sentry_streaming_body_read(*args, **kwargs): body.read = sentry_streaming_body_read - def sentry_streaming_body_close(*args, **kwargs): - # type: (*Any, **Any) -> None + orig_close = body.close + + def sentry_streaming_body_close(*args: Any, **kwargs: Any) -> None: streaming_span.finish() orig_close(*args, **kwargs) body.close = sentry_streaming_body_close + span.__exit__(None, None, None) + -def _sentry_after_call_error(context, exception, **kwargs): - # type: (Dict[str, Any], Type[BaseException], **Any) -> None - span = context.pop("_sentrysdk_span", None) # type: Optional[Span] +def _sentry_after_call_error( + context: Dict[str, Any], exception: Type[BaseException], **kwargs: Any +) -> None: + span: Optional[Span] = context.pop("_sentrysdk_span", None) # Span could be absent if the integration is disabled. if span is None: return + + span_data = context.pop("_sentrysdk_span_data", {}) + + sentry_sdk.add_breadcrumb( + type="http", + category="httplib", + data=span_data, + ) + span.__exit__(type(exception), exception, None) diff --git a/sentry_sdk/integrations/bottle.py b/sentry_sdk/integrations/bottle.py index 8a9fc41208..cdc36f50d6 100644 --- a/sentry_sdk/integrations/bottle.py +++ b/sentry_sdk/integrations/bottle.py @@ -1,7 +1,8 @@ +from __future__ import annotations import functools import sentry_sdk -from sentry_sdk.tracing import SOURCE_FOR_STYLE +from sentry_sdk.consts import SOURCE_FOR_STYLE from sentry_sdk.utils import ( capture_internal_exceptions, ensure_integration_enabled, @@ -55,11 +56,10 @@ class BottleIntegration(Integration): def __init__( self, - transaction_style="endpoint", # type: str + transaction_style: str = "endpoint", *, - failed_request_status_codes=_DEFAULT_FAILED_REQUEST_STATUS_CODES, # type: Set[int] - ): - # type: (...) 
-> None + failed_request_status_codes: Set[int] = _DEFAULT_FAILED_REQUEST_STATUS_CODES, + ) -> None: if transaction_style not in TRANSACTION_STYLE_VALUES: raise ValueError( @@ -70,16 +70,16 @@ def __init__( self.failed_request_status_codes = failed_request_status_codes @staticmethod - def setup_once(): - # type: () -> None + def setup_once() -> None: version = parse_version(BOTTLE_VERSION) _check_minimum_version(BottleIntegration, version) old_app = Bottle.__call__ @ensure_integration_enabled(BottleIntegration, old_app) - def sentry_patched_wsgi_app(self, environ, start_response): - # type: (Any, Dict[str, str], Callable[..., Any]) -> _ScopedResponse + def sentry_patched_wsgi_app( + self: Any, environ: Dict[str, str], start_response: Callable[..., Any] + ) -> _ScopedResponse: middleware = SentryWsgiMiddleware( lambda *a, **kw: old_app(self, *a, **kw), span_origin=BottleIntegration.origin, @@ -92,8 +92,7 @@ def sentry_patched_wsgi_app(self, environ, start_response): old_handle = Bottle._handle @functools.wraps(old_handle) - def _patched_handle(self, environ): - # type: (Bottle, Dict[str, Any]) -> Any + def _patched_handle(self: Bottle, environ: Dict[str, Any]) -> Any: integration = sentry_sdk.get_client().get_integration(BottleIntegration) if integration is None: return old_handle(self, environ) @@ -112,16 +111,14 @@ def _patched_handle(self, environ): old_make_callback = Route._make_callback @functools.wraps(old_make_callback) - def patched_make_callback(self, *args, **kwargs): - # type: (Route, *object, **object) -> Any + def patched_make_callback(self: Route, *args: object, **kwargs: object) -> Any: prepared_callback = old_make_callback(self, *args, **kwargs) integration = sentry_sdk.get_client().get_integration(BottleIntegration) if integration is None: return prepared_callback - def wrapped_callback(*args, **kwargs): - # type: (*object, **object) -> Any + def wrapped_callback(*args: object, **kwargs: object) -> Any: try: res = prepared_callback(*args, **kwargs) except Exception as exception: @@ -142,38 +139,33 @@ def wrapped_callback(*args, **kwargs): class BottleRequestExtractor(RequestExtractor): - def env(self): - # type: () -> Dict[str, str] + def env(self) -> Dict[str, str]: return self.request.environ - def cookies(self): - # type: () -> Dict[str, str] + def cookies(self) -> Dict[str, str]: return self.request.cookies - def raw_data(self): - # type: () -> bytes + def raw_data(self) -> bytes: return self.request.body.read() - def form(self): - # type: () -> FormsDict + def form(self) -> FormsDict: if self.is_json(): return None return self.request.forms.decode() - def files(self): - # type: () -> Optional[Dict[str, str]] + def files(self) -> Optional[Dict[str, str]]: if self.is_json(): return None return self.request.files - def size_of_file(self, file): - # type: (FileUpload) -> int + def size_of_file(self, file: FileUpload) -> int: return file.content_length -def _set_transaction_name_and_source(event, transaction_style, request): - # type: (Event, str, Any) -> None +def _set_transaction_name_and_source( + event: Event, transaction_style: str, request: Any +) -> None: name = "" if transaction_style == "url": @@ -196,11 +188,11 @@ def _set_transaction_name_and_source(event, transaction_style, request): event["transaction_info"] = {"source": SOURCE_FOR_STYLE[transaction_style]} -def _make_request_event_processor(app, request, integration): - # type: (Bottle, LocalRequest, BottleIntegration) -> EventProcessor +def _make_request_event_processor( + app: Bottle, request: LocalRequest, 
integration: BottleIntegration +) -> EventProcessor: - def event_processor(event, hint): - # type: (Event, dict[str, Any]) -> Event + def event_processor(event: Event, hint: dict[str, Any]) -> Event: _set_transaction_name_and_source(event, integration.transaction_style, request) with capture_internal_exceptions(): @@ -211,8 +203,7 @@ def event_processor(event, hint): return event_processor -def _capture_exception(exception, handled): - # type: (BaseException, bool) -> None +def _capture_exception(exception: BaseException, handled: bool) -> None: event, hint = event_from_exception( exception, client_options=sentry_sdk.get_client().options, diff --git a/sentry_sdk/integrations/celery/__init__.py b/sentry_sdk/integrations/celery/__init__.py index b5601fc0f9..076a34c232 100644 --- a/sentry_sdk/integrations/celery/__init__.py +++ b/sentry_sdk/integrations/celery/__init__.py @@ -1,11 +1,11 @@ +from __future__ import annotations import sys from collections.abc import Mapping from functools import wraps import sentry_sdk from sentry_sdk import isolation_scope -from sentry_sdk.api import continue_trace -from sentry_sdk.consts import OP, SPANSTATUS, SPANDATA +from sentry_sdk.consts import OP, SPANSTATUS, SPANDATA, BAGGAGE_HEADER_NAME from sentry_sdk.integrations import _check_minimum_version, Integration, DidNotEnable from sentry_sdk.integrations.celery.beat import ( _patch_beat_apply_entry, @@ -14,7 +14,7 @@ ) from sentry_sdk.integrations.celery.utils import _now_seconds_since_epoch from sentry_sdk.integrations.logging import ignore_logger -from sentry_sdk.tracing import BAGGAGE_HEADER_NAME, TransactionSource +from sentry_sdk.tracing import TransactionSource from sentry_sdk.tracing_utils import Baggage from sentry_sdk.utils import ( capture_internal_exceptions, @@ -63,11 +63,10 @@ class CeleryIntegration(Integration): def __init__( self, - propagate_traces=True, - monitor_beat_tasks=False, - exclude_beat_tasks=None, - ): - # type: (bool, bool, Optional[List[str]]) -> None + propagate_traces: bool = True, + monitor_beat_tasks: bool = False, + exclude_beat_tasks: Optional[List[str]] = None, + ) -> None: self.propagate_traces = propagate_traces self.monitor_beat_tasks = monitor_beat_tasks self.exclude_beat_tasks = exclude_beat_tasks @@ -77,8 +76,7 @@ def __init__( _setup_celery_beat_signals(monitor_beat_tasks) @staticmethod - def setup_once(): - # type: () -> None + def setup_once() -> None: _check_minimum_version(CeleryIntegration, CELERY_VERSION) _patch_build_tracer() @@ -98,22 +96,19 @@ def setup_once(): ignore_logger("celery.redirected") -def _set_status(status): - # type: (str) -> None +def _set_status(status: str) -> None: with capture_internal_exceptions(): - scope = sentry_sdk.get_current_scope() - if scope.span is not None: - scope.span.set_status(status) + span = sentry_sdk.get_current_span() + if span is not None: + span.set_status(status) -def _capture_exception(task, exc_info): - # type: (Any, ExcInfo) -> None +def _capture_exception(task: Any, exc_info: ExcInfo) -> None: client = sentry_sdk.get_client() if client.get_integration(CeleryIntegration) is None: return if isinstance(exc_info[1], CELERY_CONTROL_FLOW_EXCEPTIONS): - # ??? 
Doesn't map to anything _set_status("aborted") return @@ -131,10 +126,10 @@ def _capture_exception(task, exc_info): sentry_sdk.capture_event(event, hint=hint) -def _make_event_processor(task, uuid, args, kwargs, request=None): - # type: (Any, Any, Any, Any, Optional[Any]) -> EventProcessor - def event_processor(event, hint): - # type: (Event, Hint) -> Optional[Event] +def _make_event_processor( + task: Any, uuid: Any, args: Any, kwargs: Any, request: Optional[Any] = None +) -> EventProcessor: + def event_processor(event: Event, hint: Hint) -> Optional[Event]: with capture_internal_exceptions(): tags = event.setdefault("tags", {}) @@ -160,8 +155,9 @@ def event_processor(event, hint): return event_processor -def _update_celery_task_headers(original_headers, span, monitor_beat_tasks): - # type: (dict[str, Any], Optional[Span], bool) -> dict[str, Any] +def _update_celery_task_headers( + original_headers: dict[str, Any], span: Optional[Span], monitor_beat_tasks: bool +) -> dict[str, Any]: """ Updates the headers of the Celery task with the tracing information and eventually Sentry Crons monitoring information for beat tasks. @@ -235,20 +231,16 @@ def _update_celery_task_headers(original_headers, span, monitor_beat_tasks): class NoOpMgr: - def __enter__(self): - # type: () -> None + def __enter__(self) -> None: return None - def __exit__(self, exc_type, exc_value, traceback): - # type: (Any, Any, Any) -> None + def __exit__(self, exc_type: Any, exc_value: Any, traceback: Any) -> None: return None -def _wrap_task_run(f): - # type: (F) -> F +def _wrap_task_run(f: F) -> F: @wraps(f) - def apply_async(*args, **kwargs): - # type: (*Any, **Any) -> Any + def apply_async(*args: Any, **kwargs: Any) -> Any: # Note: kwargs can contain headers=None, so no setdefault! # Unsure which backend though. integration = sentry_sdk.get_client().get_integration(CeleryIntegration) @@ -264,7 +256,7 @@ def apply_async(*args, **kwargs): return f(*args, **kwargs) if isinstance(args[0], Task): - task_name = args[0].name # type: str + task_name: str = args[0].name elif len(args) > 1 and isinstance(args[1], str): task_name = args[1] else: @@ -272,15 +264,16 @@ def apply_async(*args, **kwargs): task_started_from_beat = sentry_sdk.get_isolation_scope()._name == "celery-beat" - span_mgr = ( + span_mgr: Union[Span, NoOpMgr] = ( sentry_sdk.start_span( op=OP.QUEUE_SUBMIT_CELERY, name=task_name, origin=CeleryIntegration.origin, + only_as_child_span=True, ) if not task_started_from_beat else NoOpMgr() - ) # type: Union[Span, NoOpMgr] + ) with span_mgr as span: kwargs["headers"] = _update_celery_task_headers( @@ -291,8 +284,7 @@ def apply_async(*args, **kwargs): return apply_async # type: ignore -def _wrap_tracer(task, f): - # type: (Any, F) -> F +def _wrap_tracer(task: Any, f: F) -> F: # Need to wrap tracer for pushing the scope before prerun is sent, and # popping it after postrun is sent. @@ -302,51 +294,38 @@ def _wrap_tracer(task, f): # crashes. @wraps(f) @ensure_integration_enabled(CeleryIntegration, f) - def _inner(*args, **kwargs): - # type: (*Any, **Any) -> Any + def _inner(*args: Any, **kwargs: Any) -> Any: with isolation_scope() as scope: scope._name = "celery" scope.clear_breadcrumbs() + scope.set_transaction_name(task.name, source=TransactionSource.TASK) scope.add_event_processor(_make_event_processor(task, *args, **kwargs)) - transaction = None - # Celery task objects are not a thing to be trusted. Even # something such as attribute access can fail. 
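# The headers read just below are the ones `_update_celery_task_headers`
# attached at publish time; they carry the distributed-tracing state that
# `continue_trace` consumes. Roughly, with illustrative values:
#
#     {
#         "sentry-trace": "771a43a4192642f0b136d5159a501700-1234567890abcdef-1",
#         "baggage": "sentry-trace_id=771a43a4192642f0b136d5159a501700,"
#                    "sentry-environment=production",
#     }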
- with capture_internal_exceptions(): - headers = args[3].get("headers") or {} - transaction = continue_trace( - headers, + headers = args[3].get("headers") or {} + + with sentry_sdk.continue_trace(headers): + with sentry_sdk.start_span( op=OP.QUEUE_TASK_CELERY, - name="unknown celery task", + name=task.name, source=TransactionSource.TASK, origin=CeleryIntegration.origin, - ) - transaction.name = task.name - transaction.set_status(SPANSTATUS.OK) + # for some reason, args[1] is a list if non-empty but a + # tuple if empty + attributes=_prepopulate_attributes(task, list(args[1]), args[2]), + ) as root_span: + return_value = f(*args, **kwargs) - if transaction is None: - return f(*args, **kwargs) + if root_span.status is None: + root_span.set_status(SPANSTATUS.OK) - with sentry_sdk.start_transaction( - transaction, - custom_sampling_context={ - "celery_job": { - "task": task.name, - # for some reason, args[1] is a list if non-empty but a - # tuple if empty - "args": list(args[1]), - "kwargs": args[2], - } - }, - ): - return f(*args, **kwargs) + return return_value return _inner # type: ignore -def _set_messaging_destination_name(task, span): - # type: (Any, Span) -> None +def _set_messaging_destination_name(task: Any, span: Span) -> None: """Set "messaging.destination.name" tag for span""" with capture_internal_exceptions(): delivery_info = task.request.delivery_info @@ -355,11 +334,10 @@ def _set_messaging_destination_name(task, span): if delivery_info.get("exchange") == "" and routing_key is not None: # Empty exchange indicates the default exchange, meaning the tasks # are sent to the queue with the same name as the routing key. - span.set_data(SPANDATA.MESSAGING_DESTINATION_NAME, routing_key) + span.set_attribute(SPANDATA.MESSAGING_DESTINATION_NAME, routing_key) -def _wrap_task_call(task, f): - # type: (Any, F) -> F +def _wrap_task_call(task: Any, f: F) -> F: # Need to wrap task call because the exception is caught before we get to # see it. Also celery's reported stacktrace is untrustworthy. @@ -370,13 +348,13 @@ def _wrap_task_call(task, f): # to add @functools.wraps(f) here. 
# https://github.com/getsentry/sentry-python/issues/421 @ensure_integration_enabled(CeleryIntegration, f) - def _inner(*args, **kwargs): - # type: (*Any, **Any) -> Any + def _inner(*args: Any, **kwargs: Any) -> Any: try: with sentry_sdk.start_span( op=OP.QUEUE_PROCESS, name=task.name, origin=CeleryIntegration.origin, + only_as_child_span=True, ) as span: _set_messaging_destination_name(task, span) @@ -392,23 +370,26 @@ def _inner(*args, **kwargs): if latency is not None: latency *= 1000 # milliseconds - span.set_data(SPANDATA.MESSAGING_MESSAGE_RECEIVE_LATENCY, latency) + span.set_attribute( + SPANDATA.MESSAGING_MESSAGE_RECEIVE_LATENCY, latency + ) with capture_internal_exceptions(): - span.set_data(SPANDATA.MESSAGING_MESSAGE_ID, task.request.id) + span.set_attribute(SPANDATA.MESSAGING_MESSAGE_ID, task.request.id) with capture_internal_exceptions(): - span.set_data( + span.set_attribute( SPANDATA.MESSAGING_MESSAGE_RETRY_COUNT, task.request.retries ) with capture_internal_exceptions(): - span.set_data( + span.set_attribute( SPANDATA.MESSAGING_SYSTEM, task.app.connection().transport.driver_type, ) return f(*args, **kwargs) + except Exception: exc_info = sys.exc_info() with capture_internal_exceptions(): @@ -418,14 +399,12 @@ def _inner(*args, **kwargs): return _inner # type: ignore -def _patch_build_tracer(): - # type: () -> None +def _patch_build_tracer() -> None: import celery.app.trace as trace # type: ignore original_build_tracer = trace.build_tracer - def sentry_build_tracer(name, task, *args, **kwargs): - # type: (Any, Any, *Any, **Any) -> Any + def sentry_build_tracer(name: Any, task: Any, *args: Any, **kwargs: Any) -> Any: if not getattr(task, "_sentry_is_patched", False): # determine whether Celery will use __call__ or run and patch # accordingly @@ -444,20 +423,17 @@ def sentry_build_tracer(name, task, *args, **kwargs): trace.build_tracer = sentry_build_tracer -def _patch_task_apply_async(): - # type: () -> None +def _patch_task_apply_async() -> None: Task.apply_async = _wrap_task_run(Task.apply_async) -def _patch_celery_send_task(): - # type: () -> None +def _patch_celery_send_task() -> None: from celery import Celery Celery.send_task = _wrap_task_run(Celery.send_task) -def _patch_worker_exit(): - # type: () -> None +def _patch_worker_exit() -> None: # Need to flush queue before worker shutdown because a crashing worker will # call os._exit @@ -465,8 +441,7 @@ def _patch_worker_exit(): original_workloop = Worker.workloop - def sentry_workloop(*args, **kwargs): - # type: (*Any, **Any) -> Any + def sentry_workloop(*args: Any, **kwargs: Any) -> Any: try: return original_workloop(*args, **kwargs) finally: @@ -480,13 +455,11 @@ def sentry_workloop(*args, **kwargs): Worker.workloop = sentry_workloop -def _patch_producer_publish(): - # type: () -> None +def _patch_producer_publish() -> None: original_publish = Producer.publish @ensure_integration_enabled(CeleryIntegration, original_publish) - def sentry_publish(self, *args, **kwargs): - # type: (Producer, *Any, **Any) -> Any + def sentry_publish(self: Producer, *args: Any, **kwargs: Any) -> Any: kwargs_headers = kwargs.get("headers", {}) if not isinstance(kwargs_headers, Mapping): # Ensure kwargs_headers is a Mapping, so we can safely call get(). 
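A pattern worth noting before the publish hunk below: throughout this patch, `span.set_data(...)` becomes `span.set_attribute(...)`, and spans that should never be promoted to root transactions pass `only_as_child_span=True` to `start_span`. A minimal sketch of the new shape, assuming `only_as_child_span` behaves as introduced here:

    import sentry_sdk
    from sentry_sdk.consts import OP, SPANDATA

    with sentry_sdk.start_span(
        op=OP.QUEUE_PUBLISH,
        name="tasks.add",
        only_as_child_span=True,
    ) as span:
        # OTel-style attributes replace the old set_data() calls.
        span.set_attribute(SPANDATA.MESSAGING_SYSTEM, "redis")
        span.set_attribute(SPANDATA.MESSAGING_DESTINATION_NAME, "celery")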
@@ -507,23 +480,40 @@ def sentry_publish(self, *args, **kwargs): op=OP.QUEUE_PUBLISH, name=task_name, origin=CeleryIntegration.origin, + only_as_child_span=True, ) as span: if task_id is not None: - span.set_data(SPANDATA.MESSAGING_MESSAGE_ID, task_id) + span.set_attribute(SPANDATA.MESSAGING_MESSAGE_ID, task_id) if exchange == "" and routing_key is not None: # Empty exchange indicates the default exchange, meaning messages are # routed to the queue with the same name as the routing key. - span.set_data(SPANDATA.MESSAGING_DESTINATION_NAME, routing_key) + span.set_attribute(SPANDATA.MESSAGING_DESTINATION_NAME, routing_key) if retries is not None: - span.set_data(SPANDATA.MESSAGING_MESSAGE_RETRY_COUNT, retries) + span.set_attribute(SPANDATA.MESSAGING_MESSAGE_RETRY_COUNT, retries) with capture_internal_exceptions(): - span.set_data( + span.set_attribute( SPANDATA.MESSAGING_SYSTEM, self.connection.transport.driver_type ) return original_publish(self, *args, **kwargs) Producer.publish = sentry_publish + + +def _prepopulate_attributes(task: Any, args: Any, kwargs: Any) -> dict[str, str]: + attributes = { + "celery.job.task": task.name, + } + + for i, arg in enumerate(args): + with capture_internal_exceptions(): + attributes[f"celery.job.args.{i}"] = str(arg) + + for kwarg, value in kwargs.items(): + with capture_internal_exceptions(): + attributes[f"celery.job.kwargs.{kwarg}"] = str(value) + + return attributes diff --git a/sentry_sdk/integrations/celery/beat.py b/sentry_sdk/integrations/celery/beat.py index 4b7e45e6f0..b0c28f7bc8 100644 --- a/sentry_sdk/integrations/celery/beat.py +++ b/sentry_sdk/integrations/celery/beat.py @@ -1,3 +1,4 @@ +from __future__ import annotations import sentry_sdk from sentry_sdk.crons import capture_checkin, MonitorStatus from sentry_sdk.integrations import DidNotEnable @@ -42,8 +43,7 @@ RedBeatScheduler = None -def _get_headers(task): - # type: (Task) -> dict[str, Any] +def _get_headers(task: Task) -> dict[str, Any]: headers = task.request.get("headers") or {} # flatten nested headers @@ -56,12 +56,13 @@ def _get_headers(task): return headers -def _get_monitor_config(celery_schedule, app, monitor_name): - # type: (Any, Celery, str) -> MonitorConfig - monitor_config = {} # type: MonitorConfig - schedule_type = None # type: Optional[MonitorConfigScheduleType] - schedule_value = None # type: Optional[Union[str, int]] - schedule_unit = None # type: Optional[MonitorConfigScheduleUnit] +def _get_monitor_config( + celery_schedule: Any, app: Celery, monitor_name: str +) -> MonitorConfig: + monitor_config: MonitorConfig = {} + schedule_type: Optional[MonitorConfigScheduleType] = None + schedule_value: Optional[Union[str, int]] = None + schedule_unit: Optional[MonitorConfigScheduleUnit] = None if isinstance(celery_schedule, crontab): schedule_type = "crontab" @@ -113,8 +114,11 @@ def _get_monitor_config(celery_schedule, app, monitor_name): return monitor_config -def _apply_crons_data_to_schedule_entry(scheduler, schedule_entry, integration): - # type: (Any, Any, sentry_sdk.integrations.celery.CeleryIntegration) -> None +def _apply_crons_data_to_schedule_entry( + scheduler: Any, + schedule_entry: Any, + integration: sentry_sdk.integrations.celery.CeleryIntegration, +) -> None: """ Add Sentry Crons information to the schedule_entry headers. 
""" @@ -158,8 +162,7 @@ def _apply_crons_data_to_schedule_entry(scheduler, schedule_entry, integration): schedule_entry.options["headers"] = headers -def _wrap_beat_scheduler(original_function): - # type: (Callable[..., Any]) -> Callable[..., Any] +def _wrap_beat_scheduler(original_function: Callable[..., Any]) -> Callable[..., Any]: """ Makes sure that: - a new Sentry trace is started for each task started by Celery Beat and @@ -178,8 +181,7 @@ def _wrap_beat_scheduler(original_function): from sentry_sdk.integrations.celery import CeleryIntegration - def sentry_patched_scheduler(*args, **kwargs): - # type: (*Any, **Any) -> None + def sentry_patched_scheduler(*args: Any, **kwargs: Any) -> None: integration = sentry_sdk.get_client().get_integration(CeleryIntegration) if integration is None: return original_function(*args, **kwargs) @@ -197,29 +199,25 @@ def sentry_patched_scheduler(*args, **kwargs): return sentry_patched_scheduler -def _patch_beat_apply_entry(): - # type: () -> None +def _patch_beat_apply_entry() -> None: Scheduler.apply_entry = _wrap_beat_scheduler(Scheduler.apply_entry) -def _patch_redbeat_apply_async(): - # type: () -> None +def _patch_redbeat_apply_async() -> None: if RedBeatScheduler is None: return RedBeatScheduler.apply_async = _wrap_beat_scheduler(RedBeatScheduler.apply_async) -def _setup_celery_beat_signals(monitor_beat_tasks): - # type: (bool) -> None +def _setup_celery_beat_signals(monitor_beat_tasks: bool) -> None: if monitor_beat_tasks: task_success.connect(crons_task_success) task_failure.connect(crons_task_failure) task_retry.connect(crons_task_retry) -def crons_task_success(sender, **kwargs): - # type: (Task, dict[Any, Any]) -> None +def crons_task_success(sender: Task, **kwargs: dict[Any, Any]) -> None: logger.debug("celery_task_success %s", sender) headers = _get_headers(sender) @@ -243,8 +241,7 @@ def crons_task_success(sender, **kwargs): ) -def crons_task_failure(sender, **kwargs): - # type: (Task, dict[Any, Any]) -> None +def crons_task_failure(sender: Task, **kwargs: dict[Any, Any]) -> None: logger.debug("celery_task_failure %s", sender) headers = _get_headers(sender) @@ -268,8 +265,7 @@ def crons_task_failure(sender, **kwargs): ) -def crons_task_retry(sender, **kwargs): - # type: (Task, dict[Any, Any]) -> None +def crons_task_retry(sender: Task, **kwargs: dict[Any, Any]) -> None: logger.debug("celery_task_retry %s", sender) headers = _get_headers(sender) diff --git a/sentry_sdk/integrations/celery/utils.py b/sentry_sdk/integrations/celery/utils.py index a1961b15bc..eb96cb9016 100644 --- a/sentry_sdk/integrations/celery/utils.py +++ b/sentry_sdk/integrations/celery/utils.py @@ -1,13 +1,20 @@ +from __future__ import annotations import time -from typing import TYPE_CHECKING, cast +from typing import TYPE_CHECKING if TYPE_CHECKING: - from typing import Any, Tuple + from typing import Any, Tuple, List from sentry_sdk._types import MonitorConfigScheduleUnit -def _now_seconds_since_epoch(): - # type: () -> float +TIME_UNITS: List[Tuple[MonitorConfigScheduleUnit, float]] = [ + ("day", 60 * 60 * 24.0), + ("hour", 60 * 60.0), + ("minute", 60.0), +] + + +def _now_seconds_since_epoch() -> float: # We cannot use `time.perf_counter()` when dealing with the duration # of a Celery task, because the start of a Celery task and # the end are recorded in different processes. 
@@ -16,28 +23,19 @@ def _now_seconds_since_epoch(): return time.time() -def _get_humanized_interval(seconds): - # type: (float) -> Tuple[int, MonitorConfigScheduleUnit] - TIME_UNITS = ( # noqa: N806 - ("day", 60 * 60 * 24.0), - ("hour", 60 * 60.0), - ("minute", 60.0), - ) - +def _get_humanized_interval(seconds: float) -> Tuple[int, MonitorConfigScheduleUnit]: seconds = float(seconds) for unit, divider in TIME_UNITS: if seconds >= divider: interval = int(seconds / divider) - return (interval, cast("MonitorConfigScheduleUnit", unit)) + return (interval, unit) return (int(seconds), "second") class NoOpMgr: - def __enter__(self): - # type: () -> None + def __enter__(self) -> None: return None - def __exit__(self, exc_type, exc_value, traceback): - # type: (Any, Any, Any) -> None + def __exit__(self, exc_type: Any, exc_value: Any, traceback: Any) -> None: return None diff --git a/sentry_sdk/integrations/chalice.py b/sentry_sdk/integrations/chalice.py index 947e41ebf7..8a4e95ba00 100644 --- a/sentry_sdk/integrations/chalice.py +++ b/sentry_sdk/integrations/chalice.py @@ -1,3 +1,4 @@ +from __future__ import annotations import sys from functools import wraps @@ -32,8 +33,7 @@ class EventSourceHandler(ChaliceEventSourceHandler): # type: ignore - def __call__(self, event, context): - # type: (Any, Any) -> Any + def __call__(self, event: Any, context: Any) -> Any: client = sentry_sdk.get_client() with sentry_sdk.isolation_scope() as scope: @@ -56,11 +56,9 @@ def __call__(self, event, context): reraise(*exc_info) -def _get_view_function_response(app, view_function, function_args): - # type: (Any, F, Any) -> F +def _get_view_function_response(app: Any, view_function: F, function_args: Any) -> F: @wraps(view_function) - def wrapped_view_function(**function_args): - # type: (**Any) -> Any + def wrapped_view_function(**function_args: Any) -> Any: client = sentry_sdk.get_client() with sentry_sdk.isolation_scope() as scope: with capture_internal_exceptions(): @@ -99,8 +97,7 @@ class ChaliceIntegration(Integration): identifier = "chalice" @staticmethod - def setup_once(): - # type: () -> None + def setup_once() -> None: version = parse_version(CHALICE_VERSION) @@ -116,8 +113,9 @@ def setup_once(): RestAPIEventHandler._get_view_function_response ) - def sentry_event_response(app, view_function, function_args): - # type: (Any, F, Dict[str, Any]) -> Any + def sentry_event_response( + app: Any, view_function: F, function_args: Dict[str, Any] + ) -> Any: wrapped_view_function = _get_view_function_response( app, view_function, function_args ) diff --git a/sentry_sdk/integrations/clickhouse_driver.py b/sentry_sdk/integrations/clickhouse_driver.py index 2561bfad04..5a2c0bda50 100644 --- a/sentry_sdk/integrations/clickhouse_driver.py +++ b/sentry_sdk/integrations/clickhouse_driver.py @@ -1,31 +1,22 @@ +from __future__ import annotations import sentry_sdk from sentry_sdk.consts import OP, SPANDATA from sentry_sdk.integrations import _check_minimum_version, Integration, DidNotEnable from sentry_sdk.tracing import Span from sentry_sdk.scope import should_send_default_pii -from sentry_sdk.utils import capture_internal_exceptions, ensure_integration_enabled +from sentry_sdk.utils import ( + _serialize_span_attribute, + capture_internal_exceptions, + ensure_integration_enabled, +) -from typing import TYPE_CHECKING, TypeVar +from typing import TYPE_CHECKING -# Hack to get new Python features working in older versions -# without introducing a hard dependency on `typing_extensions` -# from: 
https://stackoverflow.com/a/71944042/300572 if TYPE_CHECKING: - from typing import ParamSpec, Callable -else: - # Fake ParamSpec - class ParamSpec: - def __init__(self, _): - self.args = None - self.kwargs = None + from typing import ParamSpec, Callable, Any, Dict, TypeVar - # Callable[anything] will return None - class _Callable: - def __getitem__(self, _): - return None - - # Make instances - Callable = _Callable() + P = ParamSpec("P") + T = TypeVar("T") try: @@ -68,10 +59,6 @@ def setup_once() -> None: ) -P = ParamSpec("P") -T = TypeVar("T") - - def _wrap_start(f: Callable[P, T]) -> Callable[P, T]: @ensure_integration_enabled(ClickhouseDriverIntegration, f) def _inner(*args: P.args, **kwargs: P.kwargs) -> T: @@ -84,19 +71,22 @@ def _inner(*args: P.args, **kwargs: P.kwargs) -> T: op=OP.DB, name=query, origin=ClickhouseDriverIntegration.origin, + only_as_child_span=True, ) connection._sentry_span = span # type: ignore[attr-defined] - _set_db_data(span, connection) - - span.set_data("query", query) + data: dict[str, Any] = _get_db_data(connection) + data["db.query.text"] = query if query_id: - span.set_data("db.query_id", query_id) + data["db.query_id"] = query_id if params and should_send_default_pii(): - span.set_data("db.params", params) + data["db.params"] = params + + connection._sentry_db_data = data # type: ignore[attr-defined] + _set_on_span(span, data) # run the original code ret = f(*args, **kwargs) @@ -109,20 +99,36 @@ def _inner(*args: P.args, **kwargs: P.kwargs) -> T: def _wrap_end(f: Callable[P, T]) -> Callable[P, T]: def _inner_end(*args: P.args, **kwargs: P.kwargs) -> T: res = f(*args, **kwargs) - instance = args[0] - span = getattr(instance.connection, "_sentry_span", None) # type: ignore[attr-defined] + client = args[0] + if not isinstance(client, clickhouse_driver.client.Client): + return res + + connection = client.connection + + span = getattr(connection, "_sentry_span", None) if span is not None: + data = getattr(connection, "_sentry_db_data", {}) + if res is not None and should_send_default_pii(): - span.set_data("db.result", res) + data["db.result"] = res + span.set_attribute("db.result", _serialize_span_attribute(res)) with capture_internal_exceptions(): - span.scope.add_breadcrumb( - message=span._data.pop("query"), category="query", data=span._data - ) + query = data.pop("db.query.text", None) + if query: + sentry_sdk.add_breadcrumb( + message=query, category="query", data=data + ) span.finish() + try: + del connection._sentry_db_data + del connection._sentry_span + except AttributeError: + pass + return res return _inner_end @@ -130,28 +136,43 @@ def _inner_end(*args: P.args, **kwargs: P.kwargs) -> T: def _wrap_send_data(f: Callable[P, T]) -> Callable[P, T]: def _inner_send_data(*args: P.args, **kwargs: P.kwargs) -> T: - instance = args[0] # type: clickhouse_driver.client.Client - data = args[2] - span = getattr(instance.connection, "_sentry_span", None) + client = args[0] + if not isinstance(client, clickhouse_driver.client.Client): + return f(*args, **kwargs) + + connection = client.connection + span = getattr(connection, "_sentry_span", None) if span is not None: - _set_db_data(span, instance.connection) + data = _get_db_data(connection) + _set_on_span(span, data) if should_send_default_pii(): - db_params = span._data.get("db.params", []) - db_params.extend(data) - span.set_data("db.params", db_params) + saved_db_data: dict[str, Any] = getattr( + connection, "_sentry_db_data", {} + ) + db_params: list[Any] = saved_db_data.get("db.params") or [] + 
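# args[2] is the chunk of rows clickhouse-driver hands to send_data; each
# chunk is appended to the accumulated "db.params" below, so the final span
# attribute reflects everything written over this connection.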
db_params_data = args[2] + if isinstance(db_params_data, list): + db_params.extend(db_params_data) + saved_db_data["db.params"] = db_params + span.set_attribute("db.params", _serialize_span_attribute(db_params)) return f(*args, **kwargs) return _inner_send_data -def _set_db_data( - span: Span, connection: clickhouse_driver.connection.Connection -) -> None: - span.set_data(SPANDATA.DB_SYSTEM, "clickhouse") - span.set_data(SPANDATA.SERVER_ADDRESS, connection.host) - span.set_data(SPANDATA.SERVER_PORT, connection.port) - span.set_data(SPANDATA.DB_NAME, connection.database) - span.set_data(SPANDATA.DB_USER, connection.user) +def _get_db_data(connection: clickhouse_driver.connection.Connection) -> Dict[str, str]: + return { + SPANDATA.DB_SYSTEM: "clickhouse", + SPANDATA.SERVER_ADDRESS: connection.host, + SPANDATA.SERVER_PORT: connection.port, + SPANDATA.DB_NAME: connection.database, + SPANDATA.DB_USER: connection.user, + } + + +def _set_on_span(span: Span, data: Dict[str, Any]) -> None: + for key, value in data.items(): + span.set_attribute(key, _serialize_span_attribute(value)) diff --git a/sentry_sdk/integrations/cloud_resource_context.py b/sentry_sdk/integrations/cloud_resource_context.py index ca5ae47e6b..607899a5a7 100644 --- a/sentry_sdk/integrations/cloud_resource_context.py +++ b/sentry_sdk/integrations/cloud_resource_context.py @@ -1,3 +1,4 @@ +from __future__ import annotations import json import urllib3 @@ -65,13 +66,11 @@ class CloudResourceContextIntegration(Integration): gcp_metadata = None - def __init__(self, cloud_provider=""): - # type: (str) -> None + def __init__(self, cloud_provider: str = "") -> None: CloudResourceContextIntegration.cloud_provider = cloud_provider @classmethod - def _is_aws(cls): - # type: () -> bool + def _is_aws(cls) -> bool: try: r = cls.http.request( "PUT", @@ -95,8 +94,7 @@ def _is_aws(cls): return False @classmethod - def _get_aws_context(cls): - # type: () -> Dict[str, str] + def _get_aws_context(cls) -> Dict[str, str]: ctx = { "cloud.provider": CLOUD_PROVIDER.AWS, "cloud.platform": CLOUD_PLATFORM.AWS_EC2, @@ -149,8 +147,7 @@ def _get_aws_context(cls): return ctx @classmethod - def _is_gcp(cls): - # type: () -> bool + def _is_gcp(cls) -> bool: try: r = cls.http.request( "GET", @@ -174,8 +171,7 @@ def _is_gcp(cls): return False @classmethod - def _get_gcp_context(cls): - # type: () -> Dict[str, str] + def _get_gcp_context(cls) -> Dict[str, str]: ctx = { "cloud.provider": CLOUD_PROVIDER.GCP, "cloud.platform": CLOUD_PLATFORM.GCP_COMPUTE_ENGINE, @@ -229,8 +225,7 @@ def _get_gcp_context(cls): return ctx @classmethod - def _get_cloud_provider(cls): - # type: () -> str + def _get_cloud_provider(cls) -> str: if cls._is_aws(): return CLOUD_PROVIDER.AWS @@ -240,8 +235,7 @@ def _get_cloud_provider(cls): return "" @classmethod - def _get_cloud_resource_context(cls): - # type: () -> Dict[str, str] + def _get_cloud_resource_context(cls) -> Dict[str, str]: cloud_provider = ( cls.cloud_provider if cls.cloud_provider != "" @@ -253,8 +247,7 @@ def _get_cloud_resource_context(cls): return {} @staticmethod - def setup_once(): - # type: () -> None + def setup_once() -> None: cloud_provider = CloudResourceContextIntegration.cloud_provider unsupported_cloud_provider = ( cloud_provider != "" and cloud_provider not in context_getters.keys() diff --git a/sentry_sdk/integrations/cohere.py b/sentry_sdk/integrations/cohere.py index 57ffdb908a..ec95d0a6a1 100644 --- a/sentry_sdk/integrations/cohere.py +++ b/sentry_sdk/integrations/cohere.py @@ -1,3 +1,4 @@ +from __future__ 
import annotations from functools import wraps from sentry_sdk import consts @@ -70,20 +71,17 @@ class CohereIntegration(Integration): identifier = "cohere" origin = f"auto.ai.{identifier}" - def __init__(self, include_prompts=True): - # type: (CohereIntegration, bool) -> None + def __init__(self: CohereIntegration, include_prompts: bool = True) -> None: self.include_prompts = include_prompts @staticmethod - def setup_once(): - # type: () -> None + def setup_once() -> None: BaseCohere.chat = _wrap_chat(BaseCohere.chat, streaming=False) Client.embed = _wrap_embed(Client.embed) BaseCohere.chat_stream = _wrap_chat(BaseCohere.chat_stream, streaming=True) -def _capture_exception(exc): - # type: (Any) -> None +def _capture_exception(exc: Any) -> None: event, hint = event_from_exception( exc, client_options=sentry_sdk.get_client().options, @@ -92,11 +90,11 @@ def _capture_exception(exc): sentry_sdk.capture_event(event, hint=hint) -def _wrap_chat(f, streaming): - # type: (Callable[..., Any], bool) -> Callable[..., Any] +def _wrap_chat(f: Callable[..., Any], streaming: bool) -> Callable[..., Any]: - def collect_chat_response_fields(span, res, include_pii): - # type: (Span, NonStreamedChatResponse, bool) -> None + def collect_chat_response_fields( + span: Span, res: NonStreamedChatResponse, include_pii: bool + ) -> None: if include_pii: if hasattr(res, "text"): set_data_normalized( @@ -130,8 +128,7 @@ def collect_chat_response_fields(span, res, include_pii): set_data_normalized(span, SPANDATA.AI_WARNINGS, res.meta.warnings) @wraps(f) - def new_chat(*args, **kwargs): - # type: (*Any, **Any) -> Any + def new_chat(*args: Any, **kwargs: Any) -> Any: integration = sentry_sdk.get_client().get_integration(CohereIntegration) if ( @@ -147,6 +144,7 @@ def new_chat(*args, **kwargs): op=consts.OP.COHERE_CHAT_COMPLETIONS_CREATE, name="cohere.client.Chat", origin=CohereIntegration.origin, + only_as_child_span=True, ) span.__enter__() try: @@ -184,8 +182,7 @@ def new_chat(*args, **kwargs): if streaming: old_iterator = res - def new_iterator(): - # type: () -> Iterator[StreamedChatResponse] + def new_iterator() -> Iterator[StreamedChatResponse]: with capture_internal_exceptions(): for x in old_iterator: @@ -219,12 +216,10 @@ def new_iterator(): return new_chat -def _wrap_embed(f): - # type: (Callable[..., Any]) -> Callable[..., Any] +def _wrap_embed(f: Callable[..., Any]) -> Callable[..., Any]: @wraps(f) - def new_embed(*args, **kwargs): - # type: (*Any, **Any) -> Any + def new_embed(*args: Any, **kwargs: Any) -> Any: integration = sentry_sdk.get_client().get_integration(CohereIntegration) if integration is None: return f(*args, **kwargs) @@ -233,6 +228,7 @@ def new_embed(*args, **kwargs): op=consts.OP.COHERE_EMBEDDINGS_CREATE, name="Cohere Embedding Creation", origin=CohereIntegration.origin, + only_as_child_span=True, ) as span: if "texts" in kwargs and ( should_send_default_pii() and integration.include_prompts diff --git a/sentry_sdk/integrations/dedupe.py b/sentry_sdk/integrations/dedupe.py index a115e35292..2434b531cb 100644 --- a/sentry_sdk/integrations/dedupe.py +++ b/sentry_sdk/integrations/dedupe.py @@ -1,3 +1,4 @@ +from __future__ import annotations import sentry_sdk from sentry_sdk.utils import ContextVar from sentry_sdk.integrations import Integration @@ -14,16 +15,13 @@ class DedupeIntegration(Integration): identifier = "dedupe" - def __init__(self): - # type: () -> None + def __init__(self) -> None: self._last_seen = ContextVar("last-seen") @staticmethod - def setup_once(): - # type: () -> None + def 
setup_once() -> None: @add_global_event_processor - def processor(event, hint): - # type: (Event, Optional[Hint]) -> Optional[Event] + def processor(event: Event, hint: Optional[Hint]) -> Optional[Event]: if hint is None: return event @@ -42,8 +40,7 @@ def processor(event, hint): return event @staticmethod - def reset_last_seen(): - # type: () -> None + def reset_last_seen() -> None: integration = sentry_sdk.get_client().get_integration(DedupeIntegration) if integration is None: return diff --git a/sentry_sdk/integrations/django/__init__.py b/sentry_sdk/integrations/django/__init__.py index 2041598fa0..8895d3924e 100644 --- a/sentry_sdk/integrations/django/__init__.py +++ b/sentry_sdk/integrations/django/__init__.py @@ -1,3 +1,5 @@ +from __future__ import annotations +import functools import inspect import sys import threading @@ -5,10 +7,9 @@ from importlib import import_module import sentry_sdk -from sentry_sdk.consts import OP, SPANDATA +from sentry_sdk.consts import OP, SPANDATA, SOURCE_FOR_STYLE, TransactionSource from sentry_sdk.scope import add_global_event_processor, should_send_default_pii from sentry_sdk.serializer import add_global_repr_processor, add_repr_sequence_type -from sentry_sdk.tracing import SOURCE_FOR_STYLE, TransactionSource from sentry_sdk.tracing_utils import add_query_source, record_sql_queries from sentry_sdk.utils import ( AnnotatedValue, @@ -55,6 +56,7 @@ except ImportError: raise DidNotEnable("Django not installed") +from sentry_sdk.integrations.django.caching import patch_caching from sentry_sdk.integrations.django.transactions import LEGACY_RESOLVER from sentry_sdk.integrations.django.templates import ( get_template_frame_from_exception, @@ -64,11 +66,6 @@ from sentry_sdk.integrations.django.signals_handlers import patch_signals from sentry_sdk.integrations.django.views import patch_views -if DJANGO_VERSION[:2] > (1, 8): - from sentry_sdk.integrations.django.caching import patch_caching -else: - patch_caching = None # type: ignore - from typing import TYPE_CHECKING if TYPE_CHECKING: @@ -89,19 +86,6 @@ from sentry_sdk._types import Event, Hint, EventProcessor, NotImplementedType -if DJANGO_VERSION < (1, 10): - - def is_authenticated(request_user): - # type: (Any) -> bool - return request_user.is_authenticated() - -else: - - def is_authenticated(request_user): - # type: (Any) -> bool - return request_user.is_authenticated - - TRANSACTION_STYLE_VALUES = ("function_name", "url") @@ -124,18 +108,17 @@ class DjangoIntegration(Integration): middleware_spans = None signals_spans = None cache_spans = None - signals_denylist = [] # type: list[signals.Signal] + signals_denylist: list[signals.Signal] = [] def __init__( self, - transaction_style="url", # type: str - middleware_spans=True, # type: bool - signals_spans=True, # type: bool - cache_spans=False, # type: bool - signals_denylist=None, # type: Optional[list[signals.Signal]] - http_methods_to_capture=DEFAULT_HTTP_METHODS_TO_CAPTURE, # type: tuple[str, ...] - ): - # type: (...) -> None + transaction_style: str = "url", + middleware_spans: bool = True, + signals_spans: bool = True, + cache_spans: bool = True, + signals_denylist: Optional[list[signals.Signal]] = None, + http_methods_to_capture: tuple[str, ...] 
= DEFAULT_HTTP_METHODS_TO_CAPTURE, + ) -> None: if transaction_style not in TRANSACTION_STYLE_VALUES: raise ValueError( "Invalid value for transaction_style: %s (must be in %s)" @@ -152,8 +135,7 @@ def __init__( self.http_methods_to_capture = tuple(map(str.upper, http_methods_to_capture)) @staticmethod - def setup_once(): - # type: () -> None + def setup_once() -> None: _check_minimum_version(DjangoIntegration, DJANGO_VERSION) install_sql_hook() @@ -168,8 +150,9 @@ def setup_once(): old_app = WSGIHandler.__call__ @ensure_integration_enabled(DjangoIntegration, old_app) - def sentry_patched_wsgi_handler(self, environ, start_response): - # type: (Any, Dict[str, str], Callable[..., Any]) -> _ScopedResponse + def sentry_patched_wsgi_handler( + self: Any, environ: Dict[str, str], start_response: Callable[..., Any] + ) -> _ScopedResponse: bound_old_app = old_app.__get__(self, WSGIHandler) from django.conf import settings @@ -199,8 +182,9 @@ def sentry_patched_wsgi_handler(self, environ, start_response): signals.got_request_exception.connect(_got_request_exception) @add_global_event_processor - def process_django_templates(event, hint): - # type: (Event, Optional[Hint]) -> Optional[Event] + def process_django_templates( + event: Event, hint: Optional[Hint] + ) -> Optional[Event]: if hint is None: return event @@ -242,8 +226,9 @@ def process_django_templates(event, hint): return event @add_global_repr_processor - def _django_queryset_repr(value, hint): - # type: (Any, Dict[str, Any]) -> Union[NotImplementedType, str] + def _django_queryset_repr( + value: Any, hint: Dict[str, Any] + ) -> Union[NotImplementedType, str]: try: # Django 1.6 can fail to import `QuerySet` when Django settings # have not yet been initialized. @@ -279,8 +264,7 @@ def _django_queryset_repr(value, hint): _DRF_PATCH_LOCK = threading.Lock() -def _patch_drf(): - # type: () -> None +def _patch_drf() -> None: """ Patch Django Rest Framework for more/better request data. DRF's request type is a wrapper around Django's request type. 
The attribute we're @@ -322,8 +306,10 @@ def _patch_drf(): else: old_drf_initial = APIView.initial - def sentry_patched_drf_initial(self, request, *args, **kwargs): - # type: (APIView, Any, *Any, **Any) -> Any + @functools.wraps(old_drf_initial) + def sentry_patched_drf_initial( + self: APIView, request: Any, *args: Any, **kwargs: Any + ) -> Any: with capture_internal_exceptions(): request._request._sentry_drf_request_backref = weakref.ref( request @@ -334,8 +320,7 @@ def sentry_patched_drf_initial(self, request, *args, **kwargs): APIView.initial = sentry_patched_drf_initial -def _patch_channels(): - # type: () -> None +def _patch_channels() -> None: try: from channels.http import AsgiHandler # type: ignore except ImportError: @@ -359,8 +344,7 @@ def _patch_channels(): patch_channels_asgi_handler_impl(AsgiHandler) -def _patch_django_asgi_handler(): - # type: () -> None +def _patch_django_asgi_handler() -> None: try: from django.core.handlers.asgi import ASGIHandler except ImportError: @@ -381,8 +365,9 @@ def _patch_django_asgi_handler(): patch_django_asgi_handler_impl(ASGIHandler) -def _set_transaction_name_and_source(scope, transaction_style, request): - # type: (sentry_sdk.Scope, str, WSGIRequest) -> None +def _set_transaction_name_and_source( + scope: sentry_sdk.Scope, transaction_style: str, request: WSGIRequest +) -> None: try: transaction_name = None if transaction_style == "function_name": @@ -414,17 +399,18 @@ def _set_transaction_name_and_source(scope, transaction_style, request): if hasattr(urlconf, "handler404"): handler = urlconf.handler404 if isinstance(handler, str): - scope.transaction = handler + scope.set_transaction_name(handler) else: - scope.transaction = transaction_from_function( + name = transaction_from_function( getattr(handler, "view_class", handler) ) + if isinstance(name, str): + scope.set_transaction_name(name) except Exception: pass -def _before_get_response(request): - # type: (WSGIRequest) -> None +def _before_get_response(request: WSGIRequest) -> None: integration = sentry_sdk.get_client().get_integration(DjangoIntegration) if integration is None: return @@ -440,8 +426,9 @@ def _before_get_response(request): ) -def _attempt_resolve_again(request, scope, transaction_style): - # type: (WSGIRequest, sentry_sdk.Scope, str) -> None +def _attempt_resolve_again( + request: WSGIRequest, scope: sentry_sdk.Scope, transaction_style: str +) -> None: """ Some django middlewares overwrite request.urlconf so we need to respect that contract, @@ -453,8 +440,7 @@ def _attempt_resolve_again(request, scope, transaction_style): _set_transaction_name_and_source(scope, transaction_style, request) -def _after_get_response(request): - # type: (WSGIRequest) -> None +def _after_get_response(request: WSGIRequest) -> None: integration = sentry_sdk.get_client().get_integration(DjangoIntegration) if integration is None or integration.transaction_style != "url": return @@ -463,8 +449,7 @@ def _after_get_response(request): _attempt_resolve_again(request, scope, integration.transaction_style) -def _patch_get_response(): - # type: () -> None +def _patch_get_response() -> None: """ patch get_response, because at that point we have the Django request object """ @@ -472,8 +457,10 @@ def _patch_get_response(): old_get_response = BaseHandler.get_response - def sentry_patched_get_response(self, request): - # type: (Any, WSGIRequest) -> Union[HttpResponse, BaseException] + @functools.wraps(old_get_response) + def sentry_patched_get_response( + self: Any, request: WSGIRequest + ) -> 
Union[HttpResponse, BaseException]:
        _before_get_response(request)
        rv = old_get_response(self, request)
        _after_get_response(request)
@@ -487,10 +474,10 @@ def sentry_patched_get_response(self, request):
     patch_get_response_async(BaseHandler, _before_get_response)


-def _make_wsgi_request_event_processor(weak_request, integration):
-    # type: (Callable[[], WSGIRequest], DjangoIntegration) -> EventProcessor
-    def wsgi_request_event_processor(event, hint):
-        # type: (Event, dict[str, Any]) -> Event
+def _make_wsgi_request_event_processor(
+    weak_request: Callable[[], WSGIRequest], integration: DjangoIntegration
+) -> EventProcessor:
+    def wsgi_request_event_processor(event: Event, hint: dict[str, Any]) -> Event:
         # if the request is gone we are fine not logging the data from
         # it. This might happen if the processor is pushed away to
         # another thread.
@@ -515,8 +502,7 @@ def wsgi_request_event_processor(event, hint):
     return wsgi_request_event_processor


-def _got_request_exception(request=None, **kwargs):
-    # type: (WSGIRequest, **Any) -> None
+def _got_request_exception(request: Optional[WSGIRequest] = None, **kwargs: Any) -> None:
     client = sentry_sdk.get_client()
     integration = client.get_integration(DjangoIntegration)
     if integration is None:
@@ -535,8 +521,7 @@ def _got_request_exception(request=None, **kwargs):


 class DjangoRequestExtractor(RequestExtractor):
-    def __init__(self, request):
-        # type: (Union[WSGIRequest, ASGIRequest]) -> None
+    def __init__(self, request: Union[WSGIRequest, ASGIRequest]) -> None:
         try:
             drf_request = request._sentry_drf_request_backref()
             if drf_request is not None:
@@ -545,18 +530,16 @@ def __init__(self, request):
             pass
         self.request = request

-    def env(self):
-        # type: () -> Dict[str, str]
+    def env(self) -> Dict[str, str]:
         return self.request.META

-    def cookies(self):
-        # type: () -> Dict[str, Union[str, AnnotatedValue]]
+    def cookies(self) -> Dict[str, Union[str, AnnotatedValue]]:
         privacy_cookies = [
             django_settings.CSRF_COOKIE_NAME,
             django_settings.SESSION_COOKIE_NAME,
         ]

-        clean_cookies = {}  # type: Dict[str, Union[str, AnnotatedValue]]
+        clean_cookies: Dict[str, Union[str, AnnotatedValue]] = {}
         for key, val in self.request.COOKIES.items():
             if key in privacy_cookies:
                 clean_cookies[key] = SENSITIVE_DATA_SUBSTITUTE
@@ -565,37 +548,31 @@ def cookies(self):

         return clean_cookies

-    def raw_data(self):
-        # type: () -> bytes
+    def raw_data(self) -> bytes:
         return self.request.body

-    def form(self):
-        # type: () -> QueryDict
+    def form(self) -> QueryDict:
         return self.request.POST

-    def files(self):
-        # type: () -> MultiValueDict
+    def files(self) -> MultiValueDict:
         return self.request.FILES

-    def size_of_file(self, file):
-        # type: (Any) -> int
+    def size_of_file(self, file: Any) -> int:
         return file.size

-    def parsed_body(self):
-        # type: () -> Optional[Dict[str, Any]]
+    def parsed_body(self) -> Optional[Dict[str, Any]]:
         try:
             return self.request.data
         except Exception:
             return RequestExtractor.parsed_body(self)


-def _set_user_info(request, event):
-    # type: (WSGIRequest, Event) -> None
+def _set_user_info(request: WSGIRequest, event: Event) -> None:
     user_info = event.setdefault("user", {})

     user = getattr(request, "user", None)

-    if user is None or not is_authenticated(user):
+    if user is None or not user.is_authenticated:
         return

     try:
@@ -614,32 +591,21 @@ def _set_user_info(request, event):
         pass


-def install_sql_hook():
-    # type: () -> None
+def install_sql_hook() -> None:
     """If installed this causes Django's queries to be captured."""
     try:
         from django.db.backends.utils import 
CursorWrapper except ImportError: from django.db.backends.util import CursorWrapper - try: - # django 1.6 and 1.7 compatability - from django.db.backends import BaseDatabaseWrapper - except ImportError: - # django 1.8 or later - from django.db.backends.base.base import BaseDatabaseWrapper + from django.db.backends.base.base import BaseDatabaseWrapper - try: - real_execute = CursorWrapper.execute - real_executemany = CursorWrapper.executemany - real_connect = BaseDatabaseWrapper.connect - except AttributeError: - # This won't work on Django versions < 1.6 - return + real_execute = CursorWrapper.execute + real_executemany = CursorWrapper.executemany + real_connect = BaseDatabaseWrapper.connect @ensure_integration_enabled(DjangoIntegration, real_execute) - def execute(self, sql, params=None): - # type: (CursorWrapper, Any, Optional[Any]) -> Any + def execute(self: CursorWrapper, sql: Any, params: Optional[Any] = None) -> Any: with record_sql_queries( cursor=self.cursor, query=sql, @@ -651,14 +617,13 @@ def execute(self, sql, params=None): _set_db_data(span, self) result = real_execute(self, sql, params) - with capture_internal_exceptions(): - add_query_source(span) + with capture_internal_exceptions(): + add_query_source(span) return result @ensure_integration_enabled(DjangoIntegration, real_executemany) - def executemany(self, sql, param_list): - # type: (CursorWrapper, Any, List[Any]) -> Any + def executemany(self: CursorWrapper, sql: Any, param_list: List[Any]) -> Any: with record_sql_queries( cursor=self.cursor, query=sql, @@ -671,14 +636,13 @@ def executemany(self, sql, param_list): result = real_executemany(self, sql, param_list) - with capture_internal_exceptions(): - add_query_source(span) + with capture_internal_exceptions(): + add_query_source(span) return result @ensure_integration_enabled(DjangoIntegration, real_connect) - def connect(self): - # type: (BaseDatabaseWrapper) -> None + def connect(self: BaseDatabaseWrapper) -> None: with capture_internal_exceptions(): sentry_sdk.add_breadcrumb(message="connect", category="query") @@ -686,6 +650,7 @@ def connect(self): op=OP.DB, name="connect", origin=DjangoIntegration.origin_db, + only_as_child_span=True, ) as span: _set_db_data(span, self) return real_connect(self) @@ -696,11 +661,10 @@ def connect(self): ignore_logger("django.db.backends") -def _set_db_data(span, cursor_or_db): - # type: (Span, Any) -> None +def _set_db_data(span: Span, cursor_or_db: Any) -> None: db = cursor_or_db.db if hasattr(cursor_or_db, "db") else cursor_or_db vendor = db.vendor - span.set_data(SPANDATA.DB_SYSTEM, vendor) + span.set_attribute(SPANDATA.DB_SYSTEM, vendor) # Some custom backends override `__getattr__`, making it look like `cursor_or_db` # actually has a `connection` and the `connection` has a `get_dsn_parameters` @@ -733,23 +697,22 @@ def _set_db_data(span, cursor_or_db): db_name = connection_params.get("dbname") or connection_params.get("database") if db_name is not None: - span.set_data(SPANDATA.DB_NAME, db_name) + span.set_attribute(SPANDATA.DB_NAME, db_name) server_address = connection_params.get("host") if server_address is not None: - span.set_data(SPANDATA.SERVER_ADDRESS, server_address) + span.set_attribute(SPANDATA.SERVER_ADDRESS, server_address) server_port = connection_params.get("port") if server_port is not None: - span.set_data(SPANDATA.SERVER_PORT, str(server_port)) + span.set_attribute(SPANDATA.SERVER_PORT, str(server_port)) server_socket_address = connection_params.get("unix_socket") if server_socket_address is not None: - 
span.set_data(SPANDATA.SERVER_SOCKET_ADDRESS, server_socket_address) + span.set_attribute(SPANDATA.SERVER_SOCKET_ADDRESS, server_socket_address) -def add_template_context_repr_sequence(): - # type: () -> None +def add_template_context_repr_sequence() -> None: try: from django.template.context import BaseContext diff --git a/sentry_sdk/integrations/django/asgi.py b/sentry_sdk/integrations/django/asgi.py index 63a3f0b8f2..e1718d24c7 100644 --- a/sentry_sdk/integrations/django/asgi.py +++ b/sentry_sdk/integrations/django/asgi.py @@ -1,3 +1,5 @@ +from __future__ import annotations + """ Instrumentation for Django 3.0 @@ -51,10 +53,8 @@ def markcoroutinefunction(func: "_F") -> "_F": return func -def _make_asgi_request_event_processor(request): - # type: (ASGIRequest) -> EventProcessor - def asgi_request_event_processor(event, hint): - # type: (Event, dict[str, Any]) -> Event +def _make_asgi_request_event_processor(request: ASGIRequest) -> EventProcessor: + def asgi_request_event_processor(event: Event, hint: dict[str, Any]) -> Event: # if the request is gone we are fine not logging the data from # it. This might happen if the processor is pushed away to # another thread. @@ -81,15 +81,16 @@ def asgi_request_event_processor(event, hint): return asgi_request_event_processor -def patch_django_asgi_handler_impl(cls): - # type: (Any) -> None +def patch_django_asgi_handler_impl(cls: Any) -> None: from sentry_sdk.integrations.django import DjangoIntegration old_app = cls.__call__ - async def sentry_patched_asgi_handler(self, scope, receive, send): - # type: (Any, Any, Any, Any) -> Any + @functools.wraps(old_app) + async def sentry_patched_asgi_handler( + self: Any, scope: Any, receive: Any, send: Any + ) -> Any: integration = sentry_sdk.get_client().get_integration(DjangoIntegration) if integration is None: return await old_app(self, scope, receive, send) @@ -110,8 +111,7 @@ async def sentry_patched_asgi_handler(self, scope, receive, send): old_create_request = cls.create_request @ensure_integration_enabled(DjangoIntegration, old_create_request) - def sentry_patched_create_request(self, *args, **kwargs): - # type: (Any, *Any, **Any) -> Any + def sentry_patched_create_request(self: Any, *args: Any, **kwargs: Any) -> Any: request, error_response = old_create_request(self, *args, **kwargs) scope = sentry_sdk.get_isolation_scope() scope.add_event_processor(_make_asgi_request_event_processor(request)) @@ -121,20 +121,20 @@ def sentry_patched_create_request(self, *args, **kwargs): cls.create_request = sentry_patched_create_request -def patch_get_response_async(cls, _before_get_response): - # type: (Any, Any) -> None +def patch_get_response_async(cls: Any, _before_get_response: Any) -> None: old_get_response_async = cls.get_response_async - async def sentry_patched_get_response_async(self, request): - # type: (Any, Any) -> Union[HttpResponse, BaseException] + @functools.wraps(old_get_response_async) + async def sentry_patched_get_response_async( + self: Any, request: Any + ) -> Union[HttpResponse, BaseException]: _before_get_response(request) return await old_get_response_async(self, request) cls.get_response_async = sentry_patched_get_response_async -def patch_channels_asgi_handler_impl(cls): - # type: (Any) -> None +def patch_channels_asgi_handler_impl(cls: Any) -> None: import channels # type: ignore from sentry_sdk.integrations.django import DjangoIntegration @@ -142,8 +142,10 @@ def patch_channels_asgi_handler_impl(cls): if channels.__version__ < "3.0.0": old_app = cls.__call__ - async def 
sentry_patched_asgi_handler(self, receive, send): - # type: (Any, Any, Any) -> Any + @functools.wraps(old_app) + async def sentry_patched_asgi_handler( + self: Any, receive: Any, send: Any + ) -> Any: integration = sentry_sdk.get_client().get_integration(DjangoIntegration) if integration is None: return await old_app(self, receive, send) @@ -165,16 +167,14 @@ async def sentry_patched_asgi_handler(self, receive, send): patch_django_asgi_handler_impl(cls) -def wrap_async_view(callback): - # type: (Any) -> Any +def wrap_async_view(callback: Any) -> Any: from sentry_sdk.integrations.django import DjangoIntegration @functools.wraps(callback) - async def sentry_wrapped_callback(request, *args, **kwargs): - # type: (Any, *Any, **Any) -> Any + async def sentry_wrapped_callback(request: Any, *args: Any, **kwargs: Any) -> Any: current_scope = sentry_sdk.get_current_scope() - if current_scope.transaction is not None: - current_scope.transaction.update_active_thread() + if current_scope.root_span is not None: + current_scope.root_span.update_active_thread() sentry_scope = sentry_sdk.get_isolation_scope() if sentry_scope.profile is not None: @@ -184,14 +184,14 @@ async def sentry_wrapped_callback(request, *args, **kwargs): op=OP.VIEW_RENDER, name=request.resolver_match.view_name, origin=DjangoIntegration.origin, + only_as_child_span=True, ): return await callback(request, *args, **kwargs) return sentry_wrapped_callback -def _asgi_middleware_mixin_factory(_check_middleware_span): - # type: (Callable[..., Any]) -> Any +def _asgi_middleware_mixin_factory(_check_middleware_span: Callable[..., Any]) -> Any: """ Mixin class factory that generates a middleware mixin for handling requests in async mode. @@ -201,14 +201,12 @@ class SentryASGIMixin: if TYPE_CHECKING: _inner = None - def __init__(self, get_response): - # type: (Callable[..., Any]) -> None + def __init__(self, get_response: Callable[..., Any]) -> None: self.get_response = get_response self._acall_method = None self._async_check() - def _async_check(self): - # type: () -> None + def _async_check(self) -> None: """ If get_response is a coroutine function, turns us into async mode so a thread is not consumed during a whole request. 
@@ -217,16 +215,14 @@ def _async_check(self): if iscoroutinefunction(self.get_response): markcoroutinefunction(self) - def async_route_check(self): - # type: () -> bool + def async_route_check(self) -> bool: """ Function that checks if we are in async mode, and if we are forwards the handling of requests to __acall__ """ return iscoroutinefunction(self.get_response) - async def __acall__(self, *args, **kwargs): - # type: (*Any, **Any) -> Any + async def __acall__(self, *args: Any, **kwargs: Any) -> Any: f = self._acall_method if f is None: if hasattr(self._inner, "__acall__"): diff --git a/sentry_sdk/integrations/django/caching.py b/sentry_sdk/integrations/django/caching.py index 7985611761..cb8685fde2 100644 --- a/sentry_sdk/integrations/django/caching.py +++ b/sentry_sdk/integrations/django/caching.py @@ -1,3 +1,4 @@ +from __future__ import annotations import functools from typing import TYPE_CHECKING from sentry_sdk.integrations.redis.utils import _get_safe_key, _key_as_string @@ -28,22 +29,29 @@ ] -def _get_span_description(method_name, args, kwargs): - # type: (str, tuple[Any], dict[str, Any]) -> str +def _get_span_description( + method_name: str, args: tuple[Any], kwargs: dict[str, Any] +) -> str: return _key_as_string(_get_safe_key(method_name, args, kwargs)) -def _patch_cache_method(cache, method_name, address, port): - # type: (CacheHandler, str, Optional[str], Optional[int]) -> None +def _patch_cache_method( + cache: CacheHandler, method_name: str, address: Optional[str], port: Optional[int] +) -> None: from sentry_sdk.integrations.django import DjangoIntegration original_method = getattr(cache, method_name) @ensure_integration_enabled(DjangoIntegration, original_method) def _instrument_call( - cache, method_name, original_method, args, kwargs, address, port - ): - # type: (CacheHandler, str, Callable[..., Any], tuple[Any, ...], dict[str, Any], Optional[str], Optional[int]) -> Any + cache: CacheHandler, + method_name: str, + original_method: Callable[..., Any], + args: tuple[Any, ...], + kwargs: dict[str, Any], + address: Optional[str], + port: Optional[int], + ) -> Any: is_set_operation = method_name.startswith("set") is_get_operation = not is_set_operation @@ -54,27 +62,28 @@ def _instrument_call( op=op, name=description, origin=DjangoIntegration.origin, + only_as_child_span=True, ) as span: value = original_method(*args, **kwargs) with capture_internal_exceptions(): if address is not None: - span.set_data(SPANDATA.NETWORK_PEER_ADDRESS, address) + span.set_attribute(SPANDATA.NETWORK_PEER_ADDRESS, address) if port is not None: - span.set_data(SPANDATA.NETWORK_PEER_PORT, port) + span.set_attribute(SPANDATA.NETWORK_PEER_PORT, port) key = _get_safe_key(method_name, args, kwargs) if key is not None: - span.set_data(SPANDATA.CACHE_KEY, key) + span.set_attribute(SPANDATA.CACHE_KEY, key) item_size = None if is_get_operation: if value: item_size = len(str(value)) - span.set_data(SPANDATA.CACHE_HIT, True) + span.set_attribute(SPANDATA.CACHE_HIT, True) else: - span.set_data(SPANDATA.CACHE_HIT, False) + span.set_attribute(SPANDATA.CACHE_HIT, False) else: # TODO: We don't handle `get_or_set` which we should arg_count = len(args) if arg_count >= 2: @@ -85,13 +94,12 @@ def _instrument_call( item_size = len(str(args[0])) if item_size is not None: - span.set_data(SPANDATA.CACHE_ITEM_SIZE, item_size) + span.set_attribute(SPANDATA.CACHE_ITEM_SIZE, item_size) return value @functools.wraps(original_method) - def sentry_method(*args, **kwargs): - # type: (*Any, **Any) -> Any + def 
sentry_method(*args: Any, **kwargs: Any) -> Any: return _instrument_call( cache, method_name, original_method, args, kwargs, address, port ) @@ -99,16 +107,16 @@ def sentry_method(*args, **kwargs): setattr(cache, method_name, sentry_method) -def _patch_cache(cache, address=None, port=None): - # type: (CacheHandler, Optional[str], Optional[int]) -> None +def _patch_cache( + cache: CacheHandler, address: Optional[str] = None, port: Optional[int] = None +) -> None: if not hasattr(cache, "_sentry_patched"): for method_name in METHODS_TO_INSTRUMENT: _patch_cache_method(cache, method_name, address, port) cache._sentry_patched = True -def _get_address_port(settings): - # type: (dict[str, Any]) -> tuple[Optional[str], Optional[int]] +def _get_address_port(settings: dict[str, Any]) -> tuple[Optional[str], Optional[int]]: location = settings.get("LOCATION") # TODO: location can also be an array of locations @@ -133,32 +141,19 @@ def _get_address_port(settings): return address, int(port) if port is not None else None -def should_enable_cache_spans(): - # type: () -> bool +def patch_caching() -> None: from sentry_sdk.integrations.django import DjangoIntegration - client = sentry_sdk.get_client() - integration = client.get_integration(DjangoIntegration) - from django.conf import settings - - return integration is not None and ( - (client.spotlight is not None and settings.DEBUG is True) - or integration.cache_spans is True - ) - - -def patch_caching(): - # type: () -> None if not hasattr(CacheHandler, "_sentry_patched"): if DJANGO_VERSION < (3, 2): original_get_item = CacheHandler.__getitem__ @functools.wraps(original_get_item) - def sentry_get_item(self, alias): - # type: (CacheHandler, str) -> Any + def sentry_get_item(self: CacheHandler, alias: str) -> Any: cache = original_get_item(self, alias) - if should_enable_cache_spans(): + integration = sentry_sdk.get_client().get_integration(DjangoIntegration) + if integration is not None and integration.cache_spans: from django.conf import settings address, port = _get_address_port( @@ -176,11 +171,11 @@ def sentry_get_item(self, alias): original_create_connection = CacheHandler.create_connection @functools.wraps(original_create_connection) - def sentry_create_connection(self, alias): - # type: (CacheHandler, str) -> Any + def sentry_create_connection(self: CacheHandler, alias: str) -> Any: cache = original_create_connection(self, alias) - if should_enable_cache_spans(): + integration = sentry_sdk.get_client().get_integration(DjangoIntegration) + if integration is not None and integration.cache_spans: address, port = _get_address_port(self.settings[alias or "default"]) _patch_cache(cache, address, port) diff --git a/sentry_sdk/integrations/django/middleware.py b/sentry_sdk/integrations/django/middleware.py index 245276566e..232d6a832c 100644 --- a/sentry_sdk/integrations/django/middleware.py +++ b/sentry_sdk/integrations/django/middleware.py @@ -1,3 +1,5 @@ +from __future__ import annotations + """ Create spans from Django middleware invocations """ @@ -38,14 +40,12 @@ from .asgi import _asgi_middleware_mixin_factory -def patch_django_middlewares(): - # type: () -> None +def patch_django_middlewares() -> None: from django.core.handlers import base old_import_string = base.import_string - def sentry_patched_import_string(dotted_path): - # type: (str) -> Any + def sentry_patched_import_string(dotted_path: str) -> Any: rv = old_import_string(dotted_path) if _import_string_should_wrap_middleware.get(None): @@ -57,8 +57,7 @@ def 
sentry_patched_import_string(dotted_path): old_load_middleware = base.BaseHandler.load_middleware - def sentry_patched_load_middleware(*args, **kwargs): - # type: (Any, Any) -> Any + def sentry_patched_load_middleware(*args: Any, **kwargs: Any) -> Any: _import_string_should_wrap_middleware.set(True) try: return old_load_middleware(*args, **kwargs) @@ -68,12 +67,10 @@ def sentry_patched_load_middleware(*args, **kwargs): base.BaseHandler.load_middleware = sentry_patched_load_middleware -def _wrap_middleware(middleware, middleware_name): - # type: (Any, str) -> Any +def _wrap_middleware(middleware: Any, middleware_name: str) -> Any: from sentry_sdk.integrations.django import DjangoIntegration - def _check_middleware_span(old_method): - # type: (Callable[..., Any]) -> Optional[Span] + def _check_middleware_span(old_method: Callable[..., Any]) -> Optional[Span]: integration = sentry_sdk.get_client().get_integration(DjangoIntegration) if integration is None or not integration.middleware_spans: return None @@ -89,18 +86,17 @@ def _check_middleware_span(old_method): op=OP.MIDDLEWARE_DJANGO, name=description, origin=DjangoIntegration.origin, + only_as_child_span=True, ) middleware_span.set_tag("django.function_name", function_name) middleware_span.set_tag("django.middleware_name", middleware_name) return middleware_span - def _get_wrapped_method(old_method): - # type: (F) -> F + def _get_wrapped_method(old_method: F) -> F: with capture_internal_exceptions(): - def sentry_wrapped_method(*args, **kwargs): - # type: (*Any, **Any) -> Any + def sentry_wrapped_method(*args: Any, **kwargs: Any) -> Any: middleware_span = _check_middleware_span(old_method) if middleware_span is None: @@ -130,8 +126,12 @@ class SentryWrappingMiddleware( middleware, "async_capable", False ) - def __init__(self, get_response=None, *args, **kwargs): - # type: (Optional[Callable[..., Any]], *Any, **Any) -> None + def __init__( + self, + get_response: Optional[Callable[..., Any]] = None, + *args: Any, + **kwargs: Any, + ) -> None: if get_response: self._inner = middleware(get_response, *args, **kwargs) else: @@ -143,8 +143,7 @@ def __init__(self, get_response=None, *args, **kwargs): # We need correct behavior for `hasattr()`, which we can only determine # when we have an instance of the middleware we're wrapping. 
- def __getattr__(self, method_name): - # type: (str) -> Any + def __getattr__(self, method_name: str) -> Any: if method_name not in ( "process_request", "process_view", @@ -159,8 +158,7 @@ def __getattr__(self, method_name): self.__dict__[method_name] = rv return rv - def __call__(self, *args, **kwargs): - # type: (*Any, **Any) -> Any + def __call__(self, *args: Any, **kwargs: Any) -> Any: if hasattr(self, "async_route_check") and self.async_route_check(): return self.__acall__(*args, **kwargs) diff --git a/sentry_sdk/integrations/django/signals_handlers.py b/sentry_sdk/integrations/django/signals_handlers.py index cb0f8b9d2e..b665bec96e 100644 --- a/sentry_sdk/integrations/django/signals_handlers.py +++ b/sentry_sdk/integrations/django/signals_handlers.py @@ -1,3 +1,4 @@ +from __future__ import annotations from functools import wraps from django.dispatch import Signal @@ -13,8 +14,7 @@ from typing import Any, Union -def _get_receiver_name(receiver): - # type: (Callable[..., Any]) -> str +def _get_receiver_name(receiver: Callable[..., Any]) -> str: name = "" if hasattr(receiver, "__qualname__"): @@ -38,8 +38,7 @@ def _get_receiver_name(receiver): return name -def patch_signals(): - # type: () -> None +def patch_signals() -> None: """ Patch django signal receivers to create a span. @@ -50,26 +49,30 @@ def patch_signals(): old_live_receivers = Signal._live_receivers - def _sentry_live_receivers(self, sender): - # type: (Signal, Any) -> Union[tuple[list[Callable[..., Any]], list[Callable[..., Any]]], list[Callable[..., Any]]] + @wraps(old_live_receivers) + def _sentry_live_receivers(self: Signal, sender: Any) -> Union[ + tuple[list[Callable[..., Any]], list[Callable[..., Any]]], + list[Callable[..., Any]], + ]: if DJANGO_VERSION >= (5, 0): sync_receivers, async_receivers = old_live_receivers(self, sender) else: sync_receivers = old_live_receivers(self, sender) async_receivers = [] - def sentry_sync_receiver_wrapper(receiver): - # type: (Callable[..., Any]) -> Callable[..., Any] + def sentry_sync_receiver_wrapper( + receiver: Callable[..., Any], + ) -> Callable[..., Any]: @wraps(receiver) - def wrapper(*args, **kwargs): - # type: (Any, Any) -> Any + def wrapper(*args: Any, **kwargs: Any) -> Any: signal_name = _get_receiver_name(receiver) with sentry_sdk.start_span( op=OP.EVENT_DJANGO, name=signal_name, origin=DjangoIntegration.origin, + only_as_child_span=True, ) as span: - span.set_data("signal", signal_name) + span.set_attribute("signal", signal_name) return receiver(*args, **kwargs) return wrapper diff --git a/sentry_sdk/integrations/django/templates.py b/sentry_sdk/integrations/django/templates.py index 10e8a924b7..9ee6511fe3 100644 --- a/sentry_sdk/integrations/django/templates.py +++ b/sentry_sdk/integrations/django/templates.py @@ -1,8 +1,9 @@ +from __future__ import annotations import functools from django.template import TemplateSyntaxError +from django.template.base import Origin from django.utils.safestring import mark_safe -from django import VERSION as DJANGO_VERSION import sentry_sdk from sentry_sdk.consts import OP @@ -17,16 +18,10 @@ from typing import Iterator from typing import Tuple -try: - # support Django 1.9 - from django.template.base import Origin -except ImportError: - # backward compatibility - from django.template.loader import LoaderOrigin as Origin - -def get_template_frame_from_exception(exc_value): - # type: (Optional[BaseException]) -> Optional[Dict[str, Any]] +def get_template_frame_from_exception( + exc_value: Optional[BaseException], +) -> Optional[Dict[str, 
Any]]: # As of Django 1.9 or so the new template debug thing showed up. if hasattr(exc_value, "template_debug"): @@ -48,8 +43,7 @@ def get_template_frame_from_exception(exc_value): return None -def _get_template_name_description(template_name): - # type: (str) -> str +def _get_template_name_description(template_name: str) -> str: if isinstance(template_name, (list, tuple)): if template_name: return "[{}, ...]".format(template_name[0]) @@ -57,8 +51,7 @@ def _get_template_name_description(template_name): return template_name -def patch_templates(): - # type: () -> None +def patch_templates() -> None: from django.template.response import SimpleTemplateResponse from sentry_sdk.integrations.django import DjangoIntegration @@ -66,28 +59,33 @@ def patch_templates(): @property # type: ignore @ensure_integration_enabled(DjangoIntegration, real_rendered_content.fget) - def rendered_content(self): - # type: (SimpleTemplateResponse) -> str + def rendered_content(self: SimpleTemplateResponse) -> str: with sentry_sdk.start_span( op=OP.TEMPLATE_RENDER, name=_get_template_name_description(self.template_name), origin=DjangoIntegration.origin, + only_as_child_span=True, ) as span: - span.set_data("context", self.context_data) + if isinstance(self.context_data, dict): + for k, v in self.context_data.items(): + span.set_attribute(f"context.{k}", v) return real_rendered_content.fget(self) SimpleTemplateResponse.rendered_content = rendered_content - if DJANGO_VERSION < (1, 7): - return import django.shortcuts real_render = django.shortcuts.render @functools.wraps(real_render) @ensure_integration_enabled(DjangoIntegration, real_render) - def render(request, template_name, context=None, *args, **kwargs): - # type: (django.http.HttpRequest, str, Optional[Dict[str, Any]], *Any, **Any) -> django.http.HttpResponse + def render( + request: django.http.HttpRequest, + template_name: str, + context: Optional[Dict[str, Any]] = None, + *args: Any, + **kwargs: Any, + ) -> django.http.HttpResponse: # Inject trace meta tags into template context context = context or {} @@ -100,15 +98,16 @@ def render(request, template_name, context=None, *args, **kwargs): op=OP.TEMPLATE_RENDER, name=_get_template_name_description(template_name), origin=DjangoIntegration.origin, + only_as_child_span=True, ) as span: - span.set_data("context", context) + for k, v in context.items(): + span.set_attribute(f"context.{k}", v) return real_render(request, template_name, context, *args, **kwargs) django.shortcuts.render = render -def _get_template_frame_from_debug(debug): - # type: (Dict[str, Any]) -> Dict[str, Any] +def _get_template_frame_from_debug(debug: Dict[str, Any]) -> Dict[str, Any]: if debug is None: return None @@ -139,8 +138,7 @@ def _get_template_frame_from_debug(debug): } -def _linebreak_iter(template_source): - # type: (str) -> Iterator[int] +def _linebreak_iter(template_source: str) -> Iterator[int]: yield 0 p = template_source.find("\n") while p >= 0: @@ -148,8 +146,9 @@ def _linebreak_iter(template_source): p = template_source.find("\n", p + 1) -def _get_template_frame_from_source(source): - # type: (Tuple[Origin, Tuple[int, int]]) -> Optional[Dict[str, Any]] +def _get_template_frame_from_source( + source: Tuple[Origin, Tuple[int, int]], +) -> Optional[Dict[str, Any]]: if not source: return None diff --git a/sentry_sdk/integrations/django/transactions.py b/sentry_sdk/integrations/django/transactions.py index 5a7d69f3c9..3fe81f2029 100644 --- a/sentry_sdk/integrations/django/transactions.py +++ 
b/sentry_sdk/integrations/django/transactions.py @@ -1,3 +1,5 @@ +from __future__ import annotations + """ Copied from raven-python. @@ -19,12 +21,7 @@ from typing import Union from re import Pattern -from django import VERSION as DJANGO_VERSION - -if DJANGO_VERSION >= (2, 0): - from django.urls.resolvers import RoutePattern -else: - RoutePattern = None +from django.urls.resolvers import RoutePattern try: from django.urls import get_resolver @@ -32,8 +29,7 @@ from django.core.urlresolvers import get_resolver -def get_regex(resolver_or_pattern): - # type: (Union[URLPattern, URLResolver]) -> Pattern[str] +def get_regex(resolver_or_pattern: Union[URLPattern, URLResolver]) -> Pattern[str]: """Utility method for django's deprecated resolver.regex""" try: regex = resolver_or_pattern.regex @@ -53,10 +49,9 @@ class RavenResolver: _either_option_matcher = re.compile(r"\[([^\]]+)\|([^\]]+)\]") _camel_re = re.compile(r"([A-Z]+)([a-z])") - _cache = {} # type: Dict[URLPattern, str] + _cache: Dict[URLPattern, str] = {} - def _simplify(self, pattern): - # type: (Union[URLPattern, URLResolver]) -> str + def _simplify(self, pattern: Union[URLPattern, URLResolver]) -> str: r""" Clean up urlpattern regexes into something readable by humans: @@ -107,8 +102,12 @@ def _simplify(self, pattern): return result - def _resolve(self, resolver, path, parents=None): - # type: (URLResolver, str, Optional[List[URLResolver]]) -> Optional[str] + def _resolve( + self, + resolver: URLResolver, + path: str, + parents: Optional[List[URLResolver]] = None, + ) -> Optional[str]: match = get_regex(resolver).search(path) # Django < 2.0 @@ -147,10 +146,11 @@ def _resolve(self, resolver, path, parents=None): def resolve( self, - path, # type: str - urlconf=None, # type: Union[None, Tuple[URLPattern, URLPattern, URLResolver], Tuple[URLPattern]] - ): - # type: (...) -> Optional[str] + path: str, + urlconf: Union[ + None, Tuple[URLPattern, URLPattern, URLResolver], Tuple[URLPattern] + ] = None, + ) -> Optional[str]: resolver = get_resolver(urlconf) match = self._resolve(resolver, path) return match diff --git a/sentry_sdk/integrations/django/views.py b/sentry_sdk/integrations/django/views.py index 0a9861a6a6..cd5a495a8f 100644 --- a/sentry_sdk/integrations/django/views.py +++ b/sentry_sdk/integrations/django/views.py @@ -1,3 +1,4 @@ +from __future__ import annotations import functools import sentry_sdk @@ -21,8 +22,7 @@ wrap_async_view = None # type: ignore -def patch_views(): - # type: () -> None +def patch_views() -> None: from django.core.handlers.base import BaseHandler from django.template.response import SimpleTemplateResponse @@ -31,18 +31,18 @@ def patch_views(): old_make_view_atomic = BaseHandler.make_view_atomic old_render = SimpleTemplateResponse.render - def sentry_patched_render(self): - # type: (SimpleTemplateResponse) -> Any + @functools.wraps(old_render) + def sentry_patched_render(self: SimpleTemplateResponse) -> Any: with sentry_sdk.start_span( op=OP.VIEW_RESPONSE_RENDER, name="serialize response", origin=DjangoIntegration.origin, + only_as_child_span=True, ): return old_render(self) @functools.wraps(old_make_view_atomic) - def sentry_patched_make_view_atomic(self, *args, **kwargs): - # type: (Any, *Any, **Any) -> Any + def sentry_patched_make_view_atomic(self: Any, *args: Any, **kwargs: Any) -> Any: callback = old_make_view_atomic(self, *args, **kwargs) # XXX: The wrapper function is created for every request. 
Find more @@ -69,16 +69,14 @@ def sentry_patched_make_view_atomic(self, *args, **kwargs): BaseHandler.make_view_atomic = sentry_patched_make_view_atomic -def _wrap_sync_view(callback): - # type: (Any) -> Any +def _wrap_sync_view(callback: Any) -> Any: from sentry_sdk.integrations.django import DjangoIntegration @functools.wraps(callback) - def sentry_wrapped_callback(request, *args, **kwargs): - # type: (Any, *Any, **Any) -> Any + def sentry_wrapped_callback(request: Any, *args: Any, **kwargs: Any) -> Any: current_scope = sentry_sdk.get_current_scope() - if current_scope.transaction is not None: - current_scope.transaction.update_active_thread() + if current_scope.root_span is not None: + current_scope.root_span.update_active_thread() sentry_scope = sentry_sdk.get_isolation_scope() # set the active thread id to the handler thread for sync views @@ -90,6 +88,7 @@ def sentry_wrapped_callback(request, *args, **kwargs): op=OP.VIEW_RENDER, name=request.resolver_match.view_name, origin=DjangoIntegration.origin, + only_as_child_span=True, ): return callback(request, *args, **kwargs) diff --git a/sentry_sdk/integrations/dramatiq.py b/sentry_sdk/integrations/dramatiq.py index a756b4c669..76abf243bc 100644 --- a/sentry_sdk/integrations/dramatiq.py +++ b/sentry_sdk/integrations/dramatiq.py @@ -1,3 +1,4 @@ +from __future__ import annotations import json import sentry_sdk @@ -36,17 +37,14 @@ class DramatiqIntegration(Integration): identifier = "dramatiq" @staticmethod - def setup_once(): - # type: () -> None + def setup_once() -> None: _patch_dramatiq_broker() -def _patch_dramatiq_broker(): - # type: () -> None +def _patch_dramatiq_broker() -> None: original_broker__init__ = Broker.__init__ - def sentry_patched_broker__init__(self, *args, **kw): - # type: (Broker, *Any, **Any) -> None + def sentry_patched_broker__init__(self: Broker, *args: Any, **kw: Any) -> None: integration = sentry_sdk.get_client().get_integration(DramatiqIntegration) try: @@ -85,8 +83,7 @@ class SentryMiddleware(Middleware): # type: ignore[misc] DramatiqIntegration. 
""" - def before_process_message(self, broker, message): - # type: (Broker, Message) -> None + def before_process_message(self, broker: Broker, message: Message) -> None: integration = sentry_sdk.get_client().get_integration(DramatiqIntegration) if integration is None: return @@ -99,8 +96,14 @@ def before_process_message(self, broker, message): scope.set_extra("dramatiq_message_id", message.message_id) scope.add_event_processor(_make_message_event_processor(message, integration)) - def after_process_message(self, broker, message, *, result=None, exception=None): - # type: (Broker, Message, Any, Optional[Any], Optional[Exception]) -> None + def after_process_message( + self: Broker, + broker: Message, + message: Any, + *, + result: Optional[Any] = None, + exception: Optional[Exception] = None, + ) -> None: integration = sentry_sdk.get_client().get_integration(DramatiqIntegration) if integration is None: return @@ -127,11 +130,11 @@ def after_process_message(self, broker, message, *, result=None, exception=None) message._scope_manager.__exit__(None, None, None) -def _make_message_event_processor(message, integration): - # type: (Message, DramatiqIntegration) -> Callable[[Event, Hint], Optional[Event]] +def _make_message_event_processor( + message: Message, integration: DramatiqIntegration +) -> Callable[[Event, Hint], Optional[Event]]: - def inner(event, hint): - # type: (Event, Hint) -> Optional[Event] + def inner(event: Event, hint: Hint) -> Optional[Event]: with capture_internal_exceptions(): DramatiqMessageExtractor(message).extract_into_event(event) @@ -141,16 +144,13 @@ def inner(event, hint): class DramatiqMessageExtractor: - def __init__(self, message): - # type: (Message) -> None + def __init__(self, message: Message) -> None: self.message_data = dict(message.asdict()) - def content_length(self): - # type: () -> int + def content_length(self) -> int: return len(json.dumps(self.message_data)) - def extract_into_event(self, event): - # type: (Event) -> None + def extract_into_event(self, event: Event) -> None: client = sentry_sdk.get_client() if not client.is_active(): return @@ -159,7 +159,7 @@ def extract_into_event(self, event): request_info = contexts.setdefault("dramatiq", {}) request_info["type"] = "dramatiq" - data = None # type: Optional[Union[AnnotatedValue, Dict[str, Any]]] + data: Optional[Union[AnnotatedValue, Dict[str, Any]]] = None if not request_body_within_bounds(client, self.content_length()): data = AnnotatedValue.removed_because_over_size_limit() else: diff --git a/sentry_sdk/integrations/excepthook.py b/sentry_sdk/integrations/excepthook.py index 61c7e460bf..ad3f7a82b6 100644 --- a/sentry_sdk/integrations/excepthook.py +++ b/sentry_sdk/integrations/excepthook.py @@ -1,3 +1,4 @@ +from __future__ import annotations import sys import sentry_sdk @@ -28,8 +29,7 @@ class ExcepthookIntegration(Integration): always_run = False - def __init__(self, always_run=False): - # type: (bool) -> None + def __init__(self, always_run: bool = False) -> None: if not isinstance(always_run, bool): raise ValueError( @@ -39,15 +39,16 @@ def __init__(self, always_run=False): self.always_run = always_run @staticmethod - def setup_once(): - # type: () -> None + def setup_once() -> None: sys.excepthook = _make_excepthook(sys.excepthook) -def _make_excepthook(old_excepthook): - # type: (Excepthook) -> Excepthook - def sentry_sdk_excepthook(type_, value, traceback): - # type: (Type[BaseException], BaseException, Optional[TracebackType]) -> None +def _make_excepthook(old_excepthook: Excepthook) -> 
Excepthook: + def sentry_sdk_excepthook( + type_: Type[BaseException], + value: BaseException, + traceback: Optional[TracebackType], + ) -> None: integration = sentry_sdk.get_client().get_integration(ExcepthookIntegration) # Note: If we replace this with ensure_integration_enabled then @@ -70,8 +71,7 @@ def sentry_sdk_excepthook(type_, value, traceback): return sentry_sdk_excepthook -def _should_send(always_run=False): - # type: (bool) -> bool +def _should_send(always_run: bool = False) -> bool: if always_run: return True diff --git a/sentry_sdk/integrations/executing.py b/sentry_sdk/integrations/executing.py index 6e68b8c0c7..649af64e58 100644 --- a/sentry_sdk/integrations/executing.py +++ b/sentry_sdk/integrations/executing.py @@ -1,3 +1,4 @@ +from __future__ import annotations import sentry_sdk from sentry_sdk.integrations import Integration, DidNotEnable from sentry_sdk.scope import add_global_event_processor @@ -20,12 +21,10 @@ class ExecutingIntegration(Integration): identifier = "executing" @staticmethod - def setup_once(): - # type: () -> None + def setup_once() -> None: @add_global_event_processor - def add_executing_info(event, hint): - # type: (Event, Optional[Hint]) -> Optional[Event] + def add_executing_info(event: Event, hint: Optional[Hint]) -> Optional[Event]: if sentry_sdk.get_client().get_integration(ExecutingIntegration) is None: return event diff --git a/sentry_sdk/integrations/falcon.py b/sentry_sdk/integrations/falcon.py index ddedcb10de..622f8bb3a0 100644 --- a/sentry_sdk/integrations/falcon.py +++ b/sentry_sdk/integrations/falcon.py @@ -1,8 +1,9 @@ +from __future__ import annotations import sentry_sdk +from sentry_sdk.consts import SOURCE_FOR_STYLE from sentry_sdk.integrations import _check_minimum_version, Integration, DidNotEnable from sentry_sdk.integrations._wsgi_common import RequestExtractor from sentry_sdk.integrations.wsgi import SentryWsgiMiddleware -from sentry_sdk.tracing import SOURCE_FOR_STYLE from sentry_sdk.utils import ( capture_internal_exceptions, ensure_integration_enabled, @@ -19,8 +20,6 @@ from sentry_sdk._types import Event, EventProcessor -# In Falcon 3.0 `falcon.api_helpers` is renamed to `falcon.app_helpers` -# and `falcon.API` to `falcon.App` try: import falcon # type: ignore @@ -29,45 +28,31 @@ except ImportError: raise DidNotEnable("Falcon not installed") -try: - import falcon.app_helpers # type: ignore - - falcon_helpers = falcon.app_helpers - falcon_app_class = falcon.App - FALCON3 = True -except ImportError: - import falcon.api_helpers # type: ignore +import falcon.app_helpers # type: ignore - falcon_helpers = falcon.api_helpers - falcon_app_class = falcon.API - FALCON3 = False +falcon_helpers = falcon.app_helpers +falcon_app_class = falcon.App -_FALCON_UNSET = None # type: Optional[object] -if FALCON3: # falcon.request._UNSET is only available in Falcon 3.0+ - with capture_internal_exceptions(): - from falcon.request import _UNSET as _FALCON_UNSET # type: ignore[import-not-found, no-redef] +_FALCON_UNSET: Optional[object] = None +with capture_internal_exceptions(): + from falcon.request import _UNSET as _FALCON_UNSET # type: ignore[import-not-found, no-redef] class FalconRequestExtractor(RequestExtractor): - def env(self): - # type: () -> Dict[str, Any] + def env(self) -> Dict[str, Any]: return self.request.env - def cookies(self): - # type: () -> Dict[str, Any] + def cookies(self) -> Dict[str, Any]: return self.request.cookies - def form(self): - # type: () -> None + def form(self) -> None: return None # No such concept in Falcon - 
def files(self): - # type: () -> None + def files(self) -> None: return None # No such concept in Falcon - def raw_data(self): - # type: () -> Optional[str] + def raw_data(self) -> Optional[str]: # As request data can only be read once we won't make this available # to Sentry. Just send back a dummy string in case there was a @@ -79,8 +64,7 @@ def raw_data(self): else: return None - def json(self): - # type: () -> Optional[Dict[str, Any]] + def json(self) -> Optional[Dict[str, Any]]: # fallback to cached_media = None if self.request._media is not available cached_media = None with capture_internal_exceptions(): @@ -101,8 +85,7 @@ def json(self): class SentryFalconMiddleware: """Captures exceptions in Falcon requests and send to Sentry""" - def process_request(self, req, resp, *args, **kwargs): - # type: (Any, Any, *Any, **Any) -> None + def process_request(self, req: Any, resp: Any, *args: Any, **kwargs: Any) -> None: integration = sentry_sdk.get_client().get_integration(FalconIntegration) if integration is None: return @@ -121,8 +104,7 @@ class FalconIntegration(Integration): transaction_style = "" - def __init__(self, transaction_style="uri_template"): - # type: (str) -> None + def __init__(self, transaction_style: str = "uri_template") -> None: if transaction_style not in TRANSACTION_STYLE_VALUES: raise ValueError( "Invalid value for transaction_style: %s (must be in %s)" @@ -131,8 +113,7 @@ def __init__(self, transaction_style="uri_template"): self.transaction_style = transaction_style @staticmethod - def setup_once(): - # type: () -> None + def setup_once() -> None: version = parse_version(FALCON_VERSION) _check_minimum_version(FalconIntegration, version) @@ -142,12 +123,10 @@ def setup_once(): _patch_prepare_middleware() -def _patch_wsgi_app(): - # type: () -> None +def _patch_wsgi_app() -> None: original_wsgi_app = falcon_app_class.__call__ - def sentry_patched_wsgi_app(self, env, start_response): - # type: (falcon.API, Any, Any) -> Any + def sentry_patched_wsgi_app(self: falcon.API, env: Any, start_response: Any) -> Any: integration = sentry_sdk.get_client().get_integration(FalconIntegration) if integration is None: return original_wsgi_app(self, env, start_response) @@ -162,13 +141,11 @@ def sentry_patched_wsgi_app(self, env, start_response): falcon_app_class.__call__ = sentry_patched_wsgi_app -def _patch_handle_exception(): - # type: () -> None +def _patch_handle_exception() -> None: original_handle_exception = falcon_app_class._handle_exception @ensure_integration_enabled(FalconIntegration, original_handle_exception) - def sentry_patched_handle_exception(self, *args): - # type: (falcon.API, *Any) -> Any + def sentry_patched_handle_exception(self: falcon.API, *args: Any) -> Any: # NOTE(jmagnusson): falcon 2.0 changed falcon.API._handle_exception # method signature from `(ex, req, resp, params)` to # `(req, resp, ex, params)` @@ -200,14 +177,12 @@ def sentry_patched_handle_exception(self, *args): falcon_app_class._handle_exception = sentry_patched_handle_exception -def _patch_prepare_middleware(): - # type: () -> None +def _patch_prepare_middleware() -> None: original_prepare_middleware = falcon_helpers.prepare_middleware def sentry_patched_prepare_middleware( - middleware=None, independent_middleware=False, asgi=False - ): - # type: (Any, Any, bool) -> Any + middleware: Any = None, independent_middleware: Any = False, asgi: bool = False + ) -> Any: if asgi: # We don't support ASGI Falcon apps, so we don't patch anything here return original_prepare_middleware(middleware, 
independent_middleware, asgi) @@ -223,8 +198,7 @@ def sentry_patched_prepare_middleware( falcon_helpers.prepare_middleware = sentry_patched_prepare_middleware -def _exception_leads_to_http_5xx(ex, response): - # type: (Exception, falcon.Response) -> bool +def _exception_leads_to_http_5xx(ex: Exception, response: falcon.Response) -> bool: is_server_error = isinstance(ex, falcon.HTTPError) and (ex.status or "").startswith( "5" ) @@ -232,23 +206,16 @@ def _exception_leads_to_http_5xx(ex, response): ex, (falcon.HTTPError, falcon.http_status.HTTPStatus) ) - # We only check the HTTP status on Falcon 3 because in Falcon 2, the status on the response - # at the stage where we capture it is listed as 200, even though we would expect to see a 500 - # status. Since at the time of this change, Falcon 2 is ca. 4 years old, we have decided to - # only perform this check on Falcon 3+, despite the risk that some handled errors might be - # reported to Sentry as unhandled on Falcon 2. - return (is_server_error or is_unhandled_error) and ( - not FALCON3 or _has_http_5xx_status(response) - ) + return (is_server_error or is_unhandled_error) and _has_http_5xx_status(response) -def _has_http_5xx_status(response): - # type: (falcon.Response) -> bool +def _has_http_5xx_status(response: falcon.Response) -> bool: return response.status.startswith("5") -def _set_transaction_name_and_source(event, transaction_style, request): - # type: (Event, str, falcon.Request) -> None +def _set_transaction_name_and_source( + event: Event, transaction_style: str, request: falcon.Request +) -> None: name_for_style = { "uri_template": request.uri_template, "path": request.path, @@ -257,11 +224,11 @@ def _set_transaction_name_and_source(event, transaction_style, request): event["transaction_info"] = {"source": SOURCE_FOR_STYLE[transaction_style]} -def _make_request_event_processor(req, integration): - # type: (falcon.Request, FalconIntegration) -> EventProcessor +def _make_request_event_processor( + req: falcon.Request, integration: FalconIntegration +) -> EventProcessor: - def event_processor(event, hint): - # type: (Event, dict[str, Any]) -> Event + def event_processor(event: Event, hint: dict[str, Any]) -> Event: _set_transaction_name_and_source(event, integration.transaction_style, req) with capture_internal_exceptions(): diff --git a/sentry_sdk/integrations/fastapi.py b/sentry_sdk/integrations/fastapi.py index 76c6adee0f..10391fe934 100644 --- a/sentry_sdk/integrations/fastapi.py +++ b/sentry_sdk/integrations/fastapi.py @@ -1,11 +1,12 @@ +from __future__ import annotations import asyncio from copy import deepcopy from functools import wraps import sentry_sdk +from sentry_sdk.consts import SOURCE_FOR_STYLE, TransactionSource from sentry_sdk.integrations import DidNotEnable from sentry_sdk.scope import should_send_default_pii -from sentry_sdk.tracing import SOURCE_FOR_STYLE, TransactionSource from sentry_sdk.utils import ( transaction_from_function, logger, @@ -38,13 +39,13 @@ class FastApiIntegration(StarletteIntegration): identifier = "fastapi" @staticmethod - def setup_once(): - # type: () -> None + def setup_once() -> None: patch_get_request_handler() -def _set_transaction_name_and_source(scope, transaction_style, request): - # type: (sentry_sdk.Scope, str, Any) -> None +def _set_transaction_name_and_source( + scope: sentry_sdk.Scope, transaction_style: str, request: Any +) -> None: name = "" if transaction_style == "endpoint": @@ -71,12 +72,10 @@ def _set_transaction_name_and_source(scope, transaction_style, request): ) -def 
patch_get_request_handler(): - # type: () -> None +def patch_get_request_handler() -> None: old_get_request_handler = fastapi.routing.get_request_handler - def _sentry_get_request_handler(*args, **kwargs): - # type: (*Any, **Any) -> Any + def _sentry_get_request_handler(*args: Any, **kwargs: Any) -> Any: dependant = kwargs.get("dependant") if ( dependant @@ -86,11 +85,10 @@ def _sentry_get_request_handler(*args, **kwargs): old_call = dependant.call @wraps(old_call) - def _sentry_call(*args, **kwargs): - # type: (*Any, **Any) -> Any + def _sentry_call(*args: Any, **kwargs: Any) -> Any: current_scope = sentry_sdk.get_current_scope() - if current_scope.transaction is not None: - current_scope.transaction.update_active_thread() + if current_scope.root_span is not None: + current_scope.root_span.update_active_thread() sentry_scope = sentry_sdk.get_isolation_scope() if sentry_scope.profile is not None: @@ -102,8 +100,7 @@ def _sentry_call(*args, **kwargs): old_app = old_get_request_handler(*args, **kwargs) - async def _sentry_app(*args, **kwargs): - # type: (*Any, **Any) -> Any + async def _sentry_app(*args: Any, **kwargs: Any) -> Any: integration = sentry_sdk.get_client().get_integration(FastApiIntegration) if integration is None: return await old_app(*args, **kwargs) @@ -117,10 +114,10 @@ async def _sentry_app(*args, **kwargs): extractor = StarletteRequestExtractor(request) info = await extractor.extract_request_info() - def _make_request_event_processor(req, integration): - # type: (Any, Any) -> Callable[[Event, Dict[str, Any]], Event] - def event_processor(event, hint): - # type: (Event, Dict[str, Any]) -> Event + def _make_request_event_processor( + req: Any, integration: Any + ) -> Callable[[Event, Dict[str, Any]], Event]: + def event_processor(event: Event, hint: Dict[str, Any]) -> Event: # Extract information from request request_info = event.get("request", {}) diff --git a/sentry_sdk/integrations/flask.py b/sentry_sdk/integrations/flask.py index f45ec6db20..708bcd01f9 100644 --- a/sentry_sdk/integrations/flask.py +++ b/sentry_sdk/integrations/flask.py @@ -1,4 +1,6 @@ +from __future__ import annotations import sentry_sdk +from sentry_sdk.consts import SOURCE_FOR_STYLE from sentry_sdk.integrations import _check_minimum_version, DidNotEnable, Integration from sentry_sdk.integrations._wsgi_common import ( DEFAULT_HTTP_METHODS_TO_CAPTURE, @@ -6,7 +8,6 @@ ) from sentry_sdk.integrations.wsgi import SentryWsgiMiddleware from sentry_sdk.scope import should_send_default_pii -from sentry_sdk.tracing import SOURCE_FOR_STYLE from sentry_sdk.utils import ( capture_internal_exceptions, ensure_integration_enabled, @@ -57,10 +58,9 @@ class FlaskIntegration(Integration): def __init__( self, - transaction_style="endpoint", # type: str - http_methods_to_capture=DEFAULT_HTTP_METHODS_TO_CAPTURE, # type: tuple[str, ...] - ): - # type: (...) -> None + transaction_style: str = "endpoint", + http_methods_to_capture: tuple[str, ...] 
= DEFAULT_HTTP_METHODS_TO_CAPTURE, + ) -> None: if transaction_style not in TRANSACTION_STYLE_VALUES: raise ValueError( "Invalid value for transaction_style: %s (must be in %s)" @@ -70,8 +70,7 @@ def __init__( self.http_methods_to_capture = tuple(map(str.upper, http_methods_to_capture)) @staticmethod - def setup_once(): - # type: () -> None + def setup_once() -> None: try: from quart import Quart # type: ignore @@ -93,8 +92,9 @@ def setup_once(): old_app = Flask.__call__ - def sentry_patched_wsgi_app(self, environ, start_response): - # type: (Any, Dict[str, str], Callable[..., Any]) -> _ScopedResponse + def sentry_patched_wsgi_app( + self: Any, environ: Dict[str, str], start_response: Callable[..., Any] + ) -> _ScopedResponse: if sentry_sdk.get_client().get_integration(FlaskIntegration) is None: return old_app(self, environ, start_response) @@ -114,8 +114,9 @@ def sentry_patched_wsgi_app(self, environ, start_response): Flask.__call__ = sentry_patched_wsgi_app -def _add_sentry_trace(sender, template, context, **extra): - # type: (Flask, Any, Dict[str, Any], **Any) -> None +def _add_sentry_trace( + sender: Flask, template: Any, context: Dict[str, Any], **extra: Any +) -> None: if "sentry_trace" in context: return @@ -125,8 +126,9 @@ def _add_sentry_trace(sender, template, context, **extra): context["sentry_trace_meta"] = trace_meta -def _set_transaction_name_and_source(scope, transaction_style, request): - # type: (sentry_sdk.Scope, str, Request) -> None +def _set_transaction_name_and_source( + scope: sentry_sdk.Scope, transaction_style: str, request: Request +) -> None: try: name_for_style = { "url": request.url_rule.rule, @@ -140,8 +142,7 @@ def _set_transaction_name_and_source(scope, transaction_style, request): pass -def _request_started(app, **kwargs): - # type: (Flask, **Any) -> None +def _request_started(app: Flask, **kwargs: Any) -> None: integration = sentry_sdk.get_client().get_integration(FlaskIntegration) if integration is None: return @@ -160,47 +161,39 @@ def _request_started(app, **kwargs): class FlaskRequestExtractor(RequestExtractor): - def env(self): - # type: () -> Dict[str, str] + def env(self) -> Dict[str, str]: return self.request.environ - def cookies(self): - # type: () -> Dict[Any, Any] + def cookies(self) -> Dict[Any, Any]: return { k: v[0] if isinstance(v, list) and len(v) == 1 else v for k, v in self.request.cookies.items() } - def raw_data(self): - # type: () -> bytes + def raw_data(self) -> bytes: return self.request.get_data() - def form(self): - # type: () -> ImmutableMultiDict[str, Any] + def form(self) -> ImmutableMultiDict[str, Any]: return self.request.form - def files(self): - # type: () -> ImmutableMultiDict[str, Any] + def files(self) -> ImmutableMultiDict[str, Any]: return self.request.files - def is_json(self): - # type: () -> bool + def is_json(self) -> bool: return self.request.is_json - def json(self): - # type: () -> Any + def json(self) -> Any: return self.request.get_json(silent=True) - def size_of_file(self, file): - # type: (FileStorage) -> int + def size_of_file(self, file: FileStorage) -> int: return file.content_length -def _make_request_event_processor(app, request, integration): - # type: (Flask, Callable[[], Request], FlaskIntegration) -> EventProcessor +def _make_request_event_processor( + app: Flask, request: Callable[[], Request], integration: FlaskIntegration +) -> EventProcessor: - def inner(event, hint): - # type: (Event, dict[str, Any]) -> Event + def inner(event: Event, hint: dict[str, Any]) -> Event: # if the request is gone we 
are fine not logging the data from # it. This might happen if the processor is pushed away to @@ -221,8 +214,9 @@ def inner(event, hint): @ensure_integration_enabled(FlaskIntegration) -def _capture_exception(sender, exception, **kwargs): - # type: (Flask, Union[ValueError, BaseException], **Any) -> None +def _capture_exception( + sender: Flask, exception: Union[ValueError, BaseException], **kwargs: Any +) -> None: event, hint = event_from_exception( exception, client_options=sentry_sdk.get_client().options, @@ -232,8 +226,7 @@ def _capture_exception(sender, exception, **kwargs): sentry_sdk.capture_event(event, hint=hint) -def _add_user_to_event(event): - # type: (Event) -> None +def _add_user_to_event(event: Event) -> None: if flask_login is None: return diff --git a/sentry_sdk/integrations/gcp.py b/sentry_sdk/integrations/gcp.py index c637b7414a..a347ce3ffe 100644 --- a/sentry_sdk/integrations/gcp.py +++ b/sentry_sdk/integrations/gcp.py @@ -1,3 +1,4 @@ +from __future__ import annotations import functools import sys from copy import deepcopy @@ -5,10 +6,12 @@ from os import environ import sentry_sdk -from sentry_sdk.api import continue_trace from sentry_sdk.consts import OP from sentry_sdk.integrations import Integration -from sentry_sdk.integrations._wsgi_common import _filter_headers +from sentry_sdk.integrations._wsgi_common import ( + _filter_headers, + _request_headers_to_span_attributes, +) from sentry_sdk.scope import should_send_default_pii from sentry_sdk.tracing import TransactionSource from sentry_sdk.utils import ( @@ -37,11 +40,11 @@ F = TypeVar("F", bound=Callable[..., Any]) -def _wrap_func(func): - # type: (F) -> F +def _wrap_func(func: F) -> F: @functools.wraps(func) - def sentry_func(functionhandler, gcp_event, *args, **kwargs): - # type: (Any, Any, *Any, **Any) -> Any + def sentry_func( + functionhandler: Any, gcp_event: Any, *args: Any, **kwargs: Any + ) -> Any: client = sentry_sdk.get_client() integration = client.get_integration(GcpIntegration) @@ -84,42 +87,30 @@ def sentry_func(functionhandler, gcp_event, *args, **kwargs): if hasattr(gcp_event, "headers"): headers = gcp_event.headers - transaction = continue_trace( - headers, - op=OP.FUNCTION_GCP, - name=environ.get("FUNCTION_NAME", ""), - source=TransactionSource.COMPONENT, - origin=GcpIntegration.origin, - ) - sampling_context = { - "gcp_env": { - "function_name": environ.get("FUNCTION_NAME"), - "function_entry_point": environ.get("ENTRY_POINT"), - "function_identity": environ.get("FUNCTION_IDENTITY"), - "function_region": environ.get("FUNCTION_REGION"), - "function_project": environ.get("GCP_PROJECT"), - }, - "gcp_event": gcp_event, - } - with sentry_sdk.start_transaction( - transaction, custom_sampling_context=sampling_context - ): - try: - return func(functionhandler, gcp_event, *args, **kwargs) - except Exception: - exc_info = sys.exc_info() - sentry_event, hint = event_from_exception( - exc_info, - client_options=client.options, - mechanism={"type": "gcp", "handled": False}, - ) - sentry_sdk.capture_event(sentry_event, hint=hint) - reraise(*exc_info) - finally: - if timeout_thread: - timeout_thread.stop() - # Flush out the event queue - client.flush() + with sentry_sdk.continue_trace(headers): + with sentry_sdk.start_span( + op=OP.FUNCTION_GCP, + name=environ.get("FUNCTION_NAME", ""), + source=TransactionSource.COMPONENT, + origin=GcpIntegration.origin, + attributes=_prepopulate_attributes(gcp_event), + ): + try: + return func(functionhandler, gcp_event, *args, **kwargs) + except Exception: + exc_info = 
sys.exc_info() + sentry_event, hint = event_from_exception( + exc_info, + client_options=client.options, + mechanism={"type": "gcp", "handled": False}, + ) + sentry_sdk.capture_event(sentry_event, hint=hint) + reraise(*exc_info) + finally: + if timeout_thread: + timeout_thread.stop() + # Flush out the event queue + client.flush() return sentry_func # type: ignore @@ -128,13 +119,11 @@ class GcpIntegration(Integration): identifier = "gcp" origin = f"auto.function.{identifier}" - def __init__(self, timeout_warning=False): - # type: (bool) -> None + def __init__(self, timeout_warning: bool = False) -> None: self.timeout_warning = timeout_warning @staticmethod - def setup_once(): - # type: () -> None + def setup_once() -> None: import __main__ as gcp_functions if not hasattr(gcp_functions, "worker_v1"): @@ -150,11 +139,11 @@ def setup_once(): ) -def _make_request_event_processor(gcp_event, configured_timeout, initial_time): - # type: (Any, Any, Any) -> EventProcessor +def _make_request_event_processor( + gcp_event: Any, configured_timeout: Any, initial_time: Any +) -> EventProcessor: - def event_processor(event, hint): - # type: (Event, Hint) -> Optional[Event] + def event_processor(event: Event, hint: Hint) -> Optional[Event]: final_time = datetime.now(timezone.utc) time_diff = final_time - initial_time @@ -205,8 +194,7 @@ def event_processor(event, hint): return event_processor -def _get_google_cloud_logs_url(final_time): - # type: (datetime) -> str +def _get_google_cloud_logs_url(final_time: datetime) -> str: """ Generates a Google Cloud Logs console URL based on the environment variables Arguments: @@ -232,3 +220,37 @@ def _get_google_cloud_logs_url(final_time): ) return url + + +ENV_TO_ATTRIBUTE = { + "FUNCTION_NAME": "faas.name", + "ENTRY_POINT": "gcp.function.entry_point", + "FUNCTION_IDENTITY": "gcp.function.identity", + "FUNCTION_REGION": "faas.region", + "GCP_PROJECT": "gcp.function.project", +} + +EVENT_TO_ATTRIBUTE = { + "method": "http.request.method", + "query_string": "url.query", +} + + +def _prepopulate_attributes(gcp_event: Any) -> dict[str, Any]: + attributes = { + "cloud.provider": "gcp", + } + + for key, attr in ENV_TO_ATTRIBUTE.items(): + if environ.get(key): + attributes[attr] = environ[key] + + for key, attr in EVENT_TO_ATTRIBUTE.items(): + if getattr(gcp_event, key, None): + attributes[attr] = getattr(gcp_event, key) + + if hasattr(gcp_event, "headers"): + headers = gcp_event.headers + attributes.update(_request_headers_to_span_attributes(headers)) + + return attributes diff --git a/sentry_sdk/integrations/gnu_backtrace.py b/sentry_sdk/integrations/gnu_backtrace.py index 21d8ea9b38..089ef51a0c 100644 --- a/sentry_sdk/integrations/gnu_backtrace.py +++ b/sentry_sdk/integrations/gnu_backtrace.py @@ -1,3 +1,4 @@ +from __future__ import annotations import re import sentry_sdk @@ -27,17 +28,14 @@ class GnuBacktraceIntegration(Integration): identifier = "gnu_backtrace" @staticmethod - def setup_once(): - # type: () -> None + def setup_once() -> None: @add_global_event_processor - def process_gnu_backtrace(event, hint): - # type: (Event, dict[str, Any]) -> Event + def process_gnu_backtrace(event: Event, hint: dict[str, Any]) -> Event: with capture_internal_exceptions(): return _process_gnu_backtrace(event, hint) -def _process_gnu_backtrace(event, hint): - # type: (Event, dict[str, Any]) -> Event +def _process_gnu_backtrace(event: Event, hint: dict[str, Any]) -> Event: if sentry_sdk.get_client().get_integration(GnuBacktraceIntegration) is None: return event diff --git 
a/sentry_sdk/integrations/gql.py b/sentry_sdk/integrations/gql.py index 5f4436f5b2..a43f04a062 100644 --- a/sentry_sdk/integrations/gql.py +++ b/sentry_sdk/integrations/gql.py @@ -1,3 +1,4 @@ +from __future__ import annotations import sentry_sdk from sentry_sdk.utils import ( event_from_exception, @@ -34,19 +35,17 @@ class GQLIntegration(Integration): identifier = "gql" @staticmethod - def setup_once(): - # type: () -> None + def setup_once() -> None: gql_version = parse_version(gql.__version__) _check_minimum_version(GQLIntegration, gql_version) _patch_execute() -def _data_from_document(document): - # type: (DocumentNode) -> EventDataType +def _data_from_document(document: DocumentNode) -> EventDataType: try: operation_ast = get_operation_ast(document) - data = {"query": print_ast(document)} # type: EventDataType + data: EventDataType = {"query": print_ast(document)} if operation_ast is not None: data["variables"] = operation_ast.variable_definitions @@ -58,8 +57,7 @@ def _data_from_document(document): return dict() -def _transport_method(transport): - # type: (Union[Transport, AsyncTransport]) -> str +def _transport_method(transport: Union[Transport, AsyncTransport]) -> str: """ The RequestsHTTPTransport allows defining the HTTP method; all other transports use POST. @@ -70,8 +68,9 @@ def _transport_method(transport): return "POST" -def _request_info_from_transport(transport): - # type: (Union[Transport, AsyncTransport, None]) -> Dict[str, str] +def _request_info_from_transport( + transport: Union[Transport, AsyncTransport, None], +) -> Dict[str, str]: if transport is None: return {} @@ -87,13 +86,13 @@ def _request_info_from_transport(transport): return request_info -def _patch_execute(): - # type: () -> None +def _patch_execute() -> None: real_execute = gql.Client.execute @ensure_integration_enabled(GQLIntegration, real_execute) - def sentry_patched_execute(self, document, *args, **kwargs): - # type: (gql.Client, DocumentNode, Any, Any) -> Any + def sentry_patched_execute( + self: gql.Client, document: DocumentNode, *args: Any, **kwargs: Any + ) -> Any: scope = sentry_sdk.get_isolation_scope() scope.add_event_processor(_make_gql_event_processor(self, document)) @@ -112,10 +111,10 @@ def sentry_patched_execute(self, document, *args, **kwargs): gql.Client.execute = sentry_patched_execute -def _make_gql_event_processor(client, document): - # type: (gql.Client, DocumentNode) -> EventProcessor - def processor(event, hint): - # type: (Event, dict[str, Any]) -> Event +def _make_gql_event_processor( + client: gql.Client, document: DocumentNode +) -> EventProcessor: + def processor(event: Event, hint: dict[str, Any]) -> Event: try: errors = hint["exc_info"][1].errors except (AttributeError, KeyError): diff --git a/sentry_sdk/integrations/graphene.py b/sentry_sdk/integrations/graphene.py index 00a8d155d4..6b1a6bd582 100644 --- a/sentry_sdk/integrations/graphene.py +++ b/sentry_sdk/integrations/graphene.py @@ -1,3 +1,4 @@ +from __future__ import annotations from contextlib import contextmanager import sentry_sdk @@ -31,22 +32,21 @@ class GrapheneIntegration(Integration): identifier = "graphene" @staticmethod - def setup_once(): - # type: () -> None + def setup_once() -> None: version = package_version("graphene") _check_minimum_version(GrapheneIntegration, version) _patch_graphql() -def _patch_graphql(): - # type: () -> None +def _patch_graphql() -> None: old_graphql_sync = graphene_schema.graphql_sync old_graphql_async = graphene_schema.graphql @ensure_integration_enabled(GrapheneIntegration, 
old_graphql_sync) - def _sentry_patched_graphql_sync(schema, source, *args, **kwargs): - # type: (GraphQLSchema, Union[str, Source], Any, Any) -> ExecutionResult + def _sentry_patched_graphql_sync( + schema: GraphQLSchema, source: Union[str, Source], *args: Any, **kwargs: Any + ) -> ExecutionResult: scope = sentry_sdk.get_isolation_scope() scope.add_event_processor(_event_processor) @@ -68,8 +68,9 @@ def _sentry_patched_graphql_sync(schema, source, *args, **kwargs): return result - async def _sentry_patched_graphql_async(schema, source, *args, **kwargs): - # type: (GraphQLSchema, Union[str, Source], Any, Any) -> ExecutionResult + async def _sentry_patched_graphql_async( + schema: GraphQLSchema, source: Union[str, Source], *args: Any, **kwargs: Any + ) -> ExecutionResult: integration = sentry_sdk.get_client().get_integration(GrapheneIntegration) if integration is None: return await old_graphql_async(schema, source, *args, **kwargs) @@ -99,8 +100,7 @@ async def _sentry_patched_graphql_async(schema, source, *args, **kwargs): graphene_schema.graphql = _sentry_patched_graphql_async -def _event_processor(event, hint): - # type: (Event, Dict[str, Any]) -> Event +def _event_processor(event: Event, hint: Dict[str, Any]) -> Event: if should_send_default_pii(): request_info = event.setdefault("request", {}) request_info["api_target"] = "graphql" @@ -112,8 +112,9 @@ def _event_processor(event, hint): @contextmanager -def graphql_span(schema, source, kwargs): - # type: (GraphQLSchema, Union[str, Source], Dict[str, Any]) -> Generator[None, None, None] +def graphql_span( + schema: GraphQLSchema, source: Union[str, Source], kwargs: Dict[str, Any] +) -> Generator[None, None, None]: operation_name = kwargs.get("operation_name") operation_type = "query" @@ -135,17 +136,10 @@ def graphql_span(schema, source, kwargs): }, ) - scope = sentry_sdk.get_current_scope() - if scope.span: - _graphql_span = scope.span.start_child(op=op, name=operation_name) - else: - _graphql_span = sentry_sdk.start_span(op=op, name=operation_name) - - _graphql_span.set_data("graphql.document", source) - _graphql_span.set_data("graphql.operation.name", operation_name) - _graphql_span.set_data("graphql.operation.type", operation_type) - - try: + with sentry_sdk.start_span( + op=op, name=operation_name, only_as_child_span=True + ) as graphql_span: + graphql_span.set_attribute("graphql.document", source) + graphql_span.set_attribute("graphql.operation.name", operation_name) + graphql_span.set_attribute("graphql.operation.type", operation_type) yield - finally: - _graphql_span.finish() diff --git a/sentry_sdk/integrations/grpc/__init__.py b/sentry_sdk/integrations/grpc/__init__.py index 4e15f95ae5..29f6100a50 100644 --- a/sentry_sdk/integrations/grpc/__init__.py +++ b/sentry_sdk/integrations/grpc/__init__.py @@ -1,3 +1,4 @@ +from __future__ import annotations from functools import wraps import grpc @@ -130,10 +131,10 @@ def patched_aio_server( # type: ignore **kwargs: P.kwargs, ) -> Server: server_interceptor = AsyncServerInterceptor() - interceptors = [ + interceptors: Sequence[grpc.ServerInterceptor] = [ server_interceptor, *(interceptors or []), - ] # type: Sequence[grpc.ServerInterceptor] + ] try: # We prefer interceptors as a list because of compatibility with diff --git a/sentry_sdk/integrations/grpc/aio/client.py b/sentry_sdk/integrations/grpc/aio/client.py index ff3c213176..0c8623ca90 100644 --- a/sentry_sdk/integrations/grpc/aio/client.py +++ b/sentry_sdk/integrations/grpc/aio/client.py @@ -44,14 +44,17 @@ async def 
intercept_unary_unary( request: Message, ) -> Union[UnaryUnaryCall, Message]: method = client_call_details.method + if isinstance(method, bytes): + method = method.decode() with sentry_sdk.start_span( op=OP.GRPC_CLIENT, - name="unary unary call to %s" % method.decode(), + name="unary unary call to %s" % method, origin=SPAN_ORIGIN, + only_as_child_span=True, ) as span: - span.set_data("type", "unary unary") - span.set_data("method", method) + span.set_attribute("type", "unary unary") + span.set_attribute("method", method) client_call_details = self._update_client_call_details_metadata_from_scope( client_call_details @@ -59,7 +62,7 @@ async def intercept_unary_unary( response = await continuation(client_call_details, request) status_code = await response.code() - span.set_data("code", status_code.name) + span.set_attribute("code", status_code.name) return response @@ -74,14 +77,17 @@ async def intercept_unary_stream( request: Message, ) -> Union[AsyncIterable[Any], UnaryStreamCall]: method = client_call_details.method + if isinstance(method, bytes): + method = method.decode() with sentry_sdk.start_span( op=OP.GRPC_CLIENT, - name="unary stream call to %s" % method.decode(), + name="unary stream call to %s" % method, origin=SPAN_ORIGIN, + only_as_child_span=True, ) as span: - span.set_data("type", "unary stream") - span.set_data("method", method) + span.set_attribute("type", "unary stream") + span.set_attribute("method", method) client_call_details = self._update_client_call_details_metadata_from_scope( client_call_details @@ -89,6 +95,6 @@ async def intercept_unary_stream( response = await continuation(client_call_details, request) # status_code = await response.code() - # span.set_data("code", status_code) + # span.set_attribute("code", status_code) return response diff --git a/sentry_sdk/integrations/grpc/aio/server.py b/sentry_sdk/integrations/grpc/aio/server.py index 381c63103e..2538b89252 100644 --- a/sentry_sdk/integrations/grpc/aio/server.py +++ b/sentry_sdk/integrations/grpc/aio/server.py @@ -1,8 +1,9 @@ +from __future__ import annotations import sentry_sdk from sentry_sdk.consts import OP from sentry_sdk.integrations import DidNotEnable from sentry_sdk.integrations.grpc.consts import SPAN_ORIGIN -from sentry_sdk.tracing import Transaction, TransactionSource +from sentry_sdk.tracing import TransactionSource from sentry_sdk.utils import event_from_exception from typing import TYPE_CHECKING @@ -21,14 +22,19 @@ class ServerInterceptor(grpc.aio.ServerInterceptor): # type: ignore - def __init__(self, find_name=None): - # type: (ServerInterceptor, Callable[[ServicerContext], str] | None) -> None + def __init__( + self: ServerInterceptor, + find_name: Callable[[ServicerContext], str] | None = None, + ) -> None: self._find_method_name = find_name or self._find_name super().__init__() - async def intercept_service(self, continuation, handler_call_details): - # type: (ServerInterceptor, Callable[[HandlerCallDetails], Awaitable[RpcMethodHandler]], HandlerCallDetails) -> Optional[Awaitable[RpcMethodHandler]] + async def intercept_service( + self: ServerInterceptor, + continuation: Callable[[HandlerCallDetails], Awaitable[RpcMethodHandler]], + handler_call_details: HandlerCallDetails, + ) -> Optional[Awaitable[RpcMethodHandler]]: self._handler_call_details = handler_call_details handler = await continuation(handler_call_details) if handler is None: @@ -37,55 +43,49 @@ async def intercept_service(self, continuation, handler_call_details): if not handler.request_streaming and not 
handler.response_streaming: handler_factory = grpc.unary_unary_rpc_method_handler - async def wrapped(request, context): - # type: (Any, ServicerContext) -> Any + async def wrapped(request: Any, context: ServicerContext) -> Any: name = self._find_method_name(context) if not name: return await handler(request, context) # What if the headers are empty? - transaction = Transaction.continue_from_headers( - dict(context.invocation_metadata()), - op=OP.GRPC_SERVER, - name=name, - source=TransactionSource.CUSTOM, - origin=SPAN_ORIGIN, - ) - - with sentry_sdk.start_transaction(transaction=transaction): - try: - return await handler.unary_unary(request, context) - except AbortError: - raise - except Exception as exc: - event, hint = event_from_exception( - exc, - mechanism={"type": "grpc", "handled": False}, - ) - sentry_sdk.capture_event(event, hint=hint) - raise + with sentry_sdk.continue_trace(dict(context.invocation_metadata())): + with sentry_sdk.start_span( + op=OP.GRPC_SERVER, + name=name, + source=TransactionSource.CUSTOM, + origin=SPAN_ORIGIN, + ): + try: + return await handler.unary_unary(request, context) + except AbortError: + raise + except Exception as exc: + event, hint = event_from_exception( + exc, + mechanism={"type": "grpc", "handled": False}, + ) + sentry_sdk.capture_event(event, hint=hint) + raise elif not handler.request_streaming and handler.response_streaming: handler_factory = grpc.unary_stream_rpc_method_handler - async def wrapped(request, context): # type: ignore - # type: (Any, ServicerContext) -> Any + async def wrapped(request: Any, context: ServicerContext) -> Any: # type: ignore async for r in handler.unary_stream(request, context): yield r elif handler.request_streaming and not handler.response_streaming: handler_factory = grpc.stream_unary_rpc_method_handler - async def wrapped(request, context): - # type: (Any, ServicerContext) -> Any + async def wrapped(request: Any, context: ServicerContext) -> Any: response = handler.stream_unary(request, context) return await response elif handler.request_streaming and handler.response_streaming: handler_factory = grpc.stream_stream_rpc_method_handler - async def wrapped(request, context): # type: ignore - # type: (Any, ServicerContext) -> Any + async def wrapped(request: Any, context: ServicerContext) -> Any: # type: ignore async for r in handler.stream_stream(request, context): yield r @@ -95,6 +95,5 @@ async def wrapped(request, context): # type: ignore response_serializer=handler.response_serializer, ) - def _find_name(self, context): - # type: (ServicerContext) -> str + def _find_name(self, context: ServicerContext) -> str: return self._handler_call_details.method diff --git a/sentry_sdk/integrations/grpc/client.py b/sentry_sdk/integrations/grpc/client.py index a5b4f9f52e..18f6db88ff 100644 --- a/sentry_sdk/integrations/grpc/client.py +++ b/sentry_sdk/integrations/grpc/client.py @@ -1,3 +1,4 @@ +from __future__ import annotations import sentry_sdk from sentry_sdk.consts import OP from sentry_sdk.integrations import DidNotEnable @@ -23,54 +24,65 @@ class ClientInterceptor( ): _is_intercepted = False - def intercept_unary_unary(self, continuation, client_call_details, request): - # type: (ClientInterceptor, Callable[[ClientCallDetails, Message], _UnaryOutcome], ClientCallDetails, Message) -> _UnaryOutcome + def intercept_unary_unary( + self: ClientInterceptor, + continuation: Callable[[ClientCallDetails, Message], _UnaryOutcome], + client_call_details: ClientCallDetails, + request: Message, + ) -> _UnaryOutcome: method = 
client_call_details.method with sentry_sdk.start_span( op=OP.GRPC_CLIENT, name="unary unary call to %s" % method, origin=SPAN_ORIGIN, + only_as_child_span=True, ) as span: - span.set_data("type", "unary unary") - span.set_data("method", method) + span.set_attribute("type", "unary unary") + span.set_attribute("method", method) client_call_details = self._update_client_call_details_metadata_from_scope( client_call_details ) response = continuation(client_call_details, request) - span.set_data("code", response.code().name) + span.set_attribute("code", response.code().name) return response - def intercept_unary_stream(self, continuation, client_call_details, request): - # type: (ClientInterceptor, Callable[[ClientCallDetails, Message], Union[Iterable[Any], UnaryStreamCall]], ClientCallDetails, Message) -> Union[Iterator[Message], Call] + def intercept_unary_stream( + self: ClientInterceptor, + continuation: Callable[ + [ClientCallDetails, Message], Union[Iterable[Any], UnaryStreamCall] + ], + client_call_details: ClientCallDetails, + request: Message, + ) -> Union[Iterator[Message], Call]: method = client_call_details.method with sentry_sdk.start_span( op=OP.GRPC_CLIENT, name="unary stream call to %s" % method, origin=SPAN_ORIGIN, + only_as_child_span=True, ) as span: - span.set_data("type", "unary stream") - span.set_data("method", method) + span.set_attribute("type", "unary stream") + span.set_attribute("method", method) client_call_details = self._update_client_call_details_metadata_from_scope( client_call_details ) - response = continuation( - client_call_details, request - ) # type: UnaryStreamCall + response: UnaryStreamCall = continuation(client_call_details, request) # Setting code on unary-stream leads to execution getting stuck - # span.set_data("code", response.code().name) + # span.set_attribute("code", response.code().name) return response @staticmethod - def _update_client_call_details_metadata_from_scope(client_call_details): - # type: (ClientCallDetails) -> ClientCallDetails + def _update_client_call_details_metadata_from_scope( + client_call_details: ClientCallDetails, + ) -> ClientCallDetails: metadata = ( list(client_call_details.metadata) if client_call_details.metadata else [] ) diff --git a/sentry_sdk/integrations/grpc/consts.py b/sentry_sdk/integrations/grpc/consts.py index 9fdb975caf..6ee9ed49ca 100644 --- a/sentry_sdk/integrations/grpc/consts.py +++ b/sentry_sdk/integrations/grpc/consts.py @@ -1 +1,3 @@ +from __future__ import annotations + SPAN_ORIGIN = "auto.grpc.grpc" diff --git a/sentry_sdk/integrations/grpc/server.py b/sentry_sdk/integrations/grpc/server.py index 0d2792d1b7..2407bfecbe 100644 --- a/sentry_sdk/integrations/grpc/server.py +++ b/sentry_sdk/integrations/grpc/server.py @@ -1,8 +1,9 @@ +from __future__ import annotations import sentry_sdk from sentry_sdk.consts import OP from sentry_sdk.integrations import DidNotEnable from sentry_sdk.integrations.grpc.consts import SPAN_ORIGIN -from sentry_sdk.tracing import Transaction, TransactionSource +from sentry_sdk.tracing import TransactionSource from typing import TYPE_CHECKING @@ -18,39 +19,41 @@ class ServerInterceptor(grpc.ServerInterceptor): # type: ignore - def __init__(self, find_name=None): - # type: (ServerInterceptor, Optional[Callable[[ServicerContext], str]]) -> None + def __init__( + self: ServerInterceptor, + find_name: Optional[Callable[[ServicerContext], str]] = None, + ) -> None: self._find_method_name = find_name or ServerInterceptor._find_name super().__init__() - def intercept_service(self, 
continuation, handler_call_details): - # type: (ServerInterceptor, Callable[[HandlerCallDetails], RpcMethodHandler], HandlerCallDetails) -> RpcMethodHandler + def intercept_service( + self: ServerInterceptor, + continuation: Callable[[HandlerCallDetails], RpcMethodHandler], + handler_call_details: HandlerCallDetails, + ) -> RpcMethodHandler: handler = continuation(handler_call_details) if not handler or not handler.unary_unary: return handler - def behavior(request, context): - # type: (Message, ServicerContext) -> Message + def behavior(request: Message, context: ServicerContext) -> Message: with sentry_sdk.isolation_scope(): name = self._find_method_name(context) if name: metadata = dict(context.invocation_metadata()) - transaction = Transaction.continue_from_headers( - metadata, - op=OP.GRPC_SERVER, - name=name, - source=TransactionSource.CUSTOM, - origin=SPAN_ORIGIN, - ) - - with sentry_sdk.start_transaction(transaction=transaction): - try: - return handler.unary_unary(request, context) - except BaseException as e: - raise e + with sentry_sdk.continue_trace(metadata): + with sentry_sdk.start_span( + op=OP.GRPC_SERVER, + name=name, + source=TransactionSource.CUSTOM, + origin=SPAN_ORIGIN, + ): + try: + return handler.unary_unary(request, context) + except BaseException as e: + raise e else: return handler.unary_unary(request, context) @@ -61,6 +64,5 @@ def behavior(request, context): ) @staticmethod - def _find_name(context): - # type: (ServicerContext) -> str + def _find_name(context: ServicerContext) -> str: return context._rpc_event.call_details.method.decode() diff --git a/sentry_sdk/integrations/httpx.py b/sentry_sdk/integrations/httpx.py index 2ddd44489f..2d5d4eb9ae 100644 --- a/sentry_sdk/integrations/httpx.py +++ b/sentry_sdk/integrations/httpx.py @@ -1,14 +1,16 @@ +from __future__ import annotations import sentry_sdk -from sentry_sdk.consts import OP, SPANDATA +from sentry_sdk.consts import OP, SPANDATA, BAGGAGE_HEADER_NAME from sentry_sdk.integrations import Integration, DidNotEnable -from sentry_sdk.tracing import BAGGAGE_HEADER_NAME from sentry_sdk.tracing_utils import Baggage, should_propagate_trace from sentry_sdk.utils import ( SENSITIVE_DATA_SUBSTITUTE, capture_internal_exceptions, ensure_integration_enabled, + http_client_status_to_breadcrumb_level, logger, parse_url, + set_thread_info_from_span, ) from typing import TYPE_CHECKING @@ -31,8 +33,7 @@ class HttpxIntegration(Integration): origin = f"auto.http.{identifier}" @staticmethod - def setup_once(): - # type: () -> None + def setup_once() -> None: """ httpx has its own transport layer and can be customized when needed, so patch Client.send and AsyncClient.send to support both synchronous and async interfaces. 
@@ -41,13 +42,11 @@ def setup_once(): _install_httpx_async_client() -def _install_httpx_client(): - # type: () -> None +def _install_httpx_client() -> None: real_send = Client.send @ensure_integration_enabled(HttpxIntegration, real_send) - def send(self, request, **kwargs): - # type: (Client, Request, **Any) -> Response + def send(self: Client, request: Request, **kwargs: Any) -> Response: parsed_url = None with capture_internal_exceptions(): parsed_url = parse_url(str(request.url), sanitize=False) @@ -60,12 +59,20 @@ def send(self, request, **kwargs): parsed_url.url if parsed_url else SENSITIVE_DATA_SUBSTITUTE, ), origin=HttpxIntegration.origin, + only_as_child_span=True, ) as span: - span.set_data(SPANDATA.HTTP_METHOD, request.method) + data = { + SPANDATA.HTTP_METHOD: request.method, + } + set_thread_info_from_span(data, span) + if parsed_url is not None: - span.set_data("url", parsed_url.url) - span.set_data(SPANDATA.HTTP_QUERY, parsed_url.query) - span.set_data(SPANDATA.HTTP_FRAGMENT, parsed_url.fragment) + data["url"] = parsed_url.url + data[SPANDATA.HTTP_QUERY] = parsed_url.query + data[SPANDATA.HTTP_FRAGMENT] = parsed_url.fragment + + for key, value in data.items(): + span.set_attribute(key, value) if should_propagate_trace(sentry_sdk.get_client(), str(request.url)): for ( @@ -86,19 +93,27 @@ def send(self, request, **kwargs): rv = real_send(self, request, **kwargs) span.set_http_status(rv.status_code) - span.set_data("reason", rv.reason_phrase) + span.set_attribute("reason", rv.reason_phrase) + + data[SPANDATA.HTTP_STATUS_CODE] = rv.status_code + data["reason"] = rv.reason_phrase + + sentry_sdk.add_breadcrumb( + type="http", + category="httplib", + data=data, + level=http_client_status_to_breadcrumb_level(rv.status_code), + ) return rv Client.send = send -def _install_httpx_async_client(): - # type: () -> None +def _install_httpx_async_client() -> None: real_send = AsyncClient.send - async def send(self, request, **kwargs): - # type: (AsyncClient, Request, **Any) -> Response + async def send(self: AsyncClient, request: Request, **kwargs: Any) -> Response: if sentry_sdk.get_client().get_integration(HttpxIntegration) is None: return await real_send(self, request, **kwargs) @@ -114,12 +129,18 @@ async def send(self, request, **kwargs): parsed_url.url if parsed_url else SENSITIVE_DATA_SUBSTITUTE, ), origin=HttpxIntegration.origin, + only_as_child_span=True, ) as span: - span.set_data(SPANDATA.HTTP_METHOD, request.method) + data = { + SPANDATA.HTTP_METHOD: request.method, + } if parsed_url is not None: - span.set_data("url", parsed_url.url) - span.set_data(SPANDATA.HTTP_QUERY, parsed_url.query) - span.set_data(SPANDATA.HTTP_FRAGMENT, parsed_url.fragment) + data["url"] = parsed_url.url + data[SPANDATA.HTTP_QUERY] = parsed_url.query + data[SPANDATA.HTTP_FRAGMENT] = parsed_url.fragment + + for key, value in data.items(): + span.set_attribute(key, value) if should_propagate_trace(sentry_sdk.get_client(), str(request.url)): for ( @@ -142,15 +163,26 @@ async def send(self, request, **kwargs): rv = await real_send(self, request, **kwargs) span.set_http_status(rv.status_code) - span.set_data("reason", rv.reason_phrase) + span.set_attribute("reason", rv.reason_phrase) + + data[SPANDATA.HTTP_STATUS_CODE] = rv.status_code + data["reason"] = rv.reason_phrase + + sentry_sdk.add_breadcrumb( + type="http", + category="httplib", + data=data, + level=http_client_status_to_breadcrumb_level(rv.status_code), + ) return rv AsyncClient.send = send -def _add_sentry_baggage_to_headers(headers, 
sentry_baggage): - # type: (MutableMapping[str, str], str) -> None +def _add_sentry_baggage_to_headers( + headers: MutableMapping[str, str], sentry_baggage: str +) -> None: """Add the Sentry baggage to the headers. This function directly mutates the provided headers. The provided sentry_baggage diff --git a/sentry_sdk/integrations/huey.py b/sentry_sdk/integrations/huey.py index f0aff4c0dd..8e741c5a2d 100644 --- a/sentry_sdk/integrations/huey.py +++ b/sentry_sdk/integrations/huey.py @@ -1,16 +1,18 @@ +from __future__ import annotations import sys from datetime import datetime import sentry_sdk -from sentry_sdk.api import continue_trace, get_baggage, get_traceparent -from sentry_sdk.consts import OP, SPANSTATUS -from sentry_sdk.integrations import DidNotEnable, Integration -from sentry_sdk.scope import should_send_default_pii -from sentry_sdk.tracing import ( +from sentry_sdk.api import get_baggage, get_traceparent +from sentry_sdk.consts import ( + OP, + SPANSTATUS, BAGGAGE_HEADER_NAME, SENTRY_TRACE_HEADER_NAME, TransactionSource, ) +from sentry_sdk.integrations import DidNotEnable, Integration +from sentry_sdk.scope import should_send_default_pii from sentry_sdk.utils import ( capture_internal_exceptions, ensure_integration_enabled, @@ -44,23 +46,21 @@ class HueyIntegration(Integration): origin = f"auto.queue.{identifier}" @staticmethod - def setup_once(): - # type: () -> None + def setup_once() -> None: patch_enqueue() patch_execute() -def patch_enqueue(): - # type: () -> None +def patch_enqueue() -> None: old_enqueue = Huey.enqueue @ensure_integration_enabled(HueyIntegration, old_enqueue) - def _sentry_enqueue(self, task): - # type: (Huey, Task) -> Optional[Union[Result, ResultGroup]] + def _sentry_enqueue(self: Huey, task: Task) -> Optional[Union[Result, ResultGroup]]: with sentry_sdk.start_span( op=OP.QUEUE_SUBMIT_HUEY, name=task.name, origin=HueyIntegration.origin, + only_as_child_span=True, ): if not isinstance(task, PeriodicTask): # Attach trace propagation data to task kwargs. 
We do @@ -75,10 +75,8 @@ def _sentry_enqueue(self, task): Huey.enqueue = _sentry_enqueue -def _make_event_processor(task): - # type: (Any) -> EventProcessor - def event_processor(event, hint): - # type: (Event, Hint) -> Optional[Event] +def _make_event_processor(task: Any) -> EventProcessor: + def event_processor(event: Event, hint: Hint) -> Optional[Event]: with capture_internal_exceptions(): tags = event.setdefault("tags", {}) @@ -105,15 +103,16 @@ def event_processor(event, hint): return event_processor -def _capture_exception(exc_info): - # type: (ExcInfo) -> None +def _capture_exception(exc_info: ExcInfo) -> None: scope = sentry_sdk.get_current_scope() - if exc_info[0] in HUEY_CONTROL_FLOW_EXCEPTIONS: - scope.transaction.set_status(SPANSTATUS.ABORTED) - return + if scope.root_span is not None: + if exc_info[0] in HUEY_CONTROL_FLOW_EXCEPTIONS: + scope.root_span.set_status(SPANSTATUS.ABORTED) + return + + scope.root_span.set_status(SPANSTATUS.INTERNAL_ERROR) - scope.transaction.set_status(SPANSTATUS.INTERNAL_ERROR) event, hint = event_from_exception( exc_info, client_options=sentry_sdk.get_client().options, @@ -122,12 +121,10 @@ def _capture_exception(exc_info): scope.capture_event(event, hint=hint) -def _wrap_task_execute(func): - # type: (F) -> F +def _wrap_task_execute(func: F) -> F: @ensure_integration_enabled(HueyIntegration, func) - def _sentry_execute(*args, **kwargs): - # type: (*Any, **Any) -> Any + def _sentry_execute(*args: Any, **kwargs: Any) -> Any: try: result = func(*args, **kwargs) except Exception: @@ -135,40 +132,40 @@ def _sentry_execute(*args, **kwargs): _capture_exception(exc_info) reraise(*exc_info) + root_span = sentry_sdk.get_current_scope().root_span + if root_span is not None: + root_span.set_status(SPANSTATUS.OK) + return result return _sentry_execute # type: ignore -def patch_execute(): - # type: () -> None +def patch_execute() -> None: old_execute = Huey._execute @ensure_integration_enabled(HueyIntegration, old_execute) - def _sentry_execute(self, task, timestamp=None): - # type: (Huey, Task, Optional[datetime]) -> Any + def _sentry_execute( + self: Huey, task: Task, timestamp: Optional[datetime] = None + ) -> Any: with sentry_sdk.isolation_scope() as scope: with capture_internal_exceptions(): scope._name = "huey" scope.clear_breadcrumbs() scope.add_event_processor(_make_event_processor(task)) - sentry_headers = task.kwargs.pop("sentry_headers", None) - - transaction = continue_trace( - sentry_headers or {}, - name=task.name, - op=OP.QUEUE_TASK_HUEY, - source=TransactionSource.TASK, - origin=HueyIntegration.origin, - ) - transaction.set_status(SPANSTATUS.OK) - if not getattr(task, "_sentry_is_patched", False): task.execute = _wrap_task_execute(task.execute) task._sentry_is_patched = True - with sentry_sdk.start_transaction(transaction): - return old_execute(self, task, timestamp) + sentry_headers = task.kwargs.pop("sentry_headers", {}) + with sentry_sdk.continue_trace(sentry_headers): + with sentry_sdk.start_span( + name=task.name, + op=OP.QUEUE_TASK_HUEY, + source=TransactionSource.TASK, + origin=HueyIntegration.origin, + ): + return old_execute(self, task, timestamp) Huey._execute = _sentry_execute diff --git a/sentry_sdk/integrations/huggingface_hub.py b/sentry_sdk/integrations/huggingface_hub.py index 2dfcb5925a..1389e8451f 100644 --- a/sentry_sdk/integrations/huggingface_hub.py +++ b/sentry_sdk/integrations/huggingface_hub.py @@ -1,3 +1,4 @@ +from __future__ import annotations from functools import wraps from sentry_sdk import consts @@ -27,13 +28,11 @@ 
class HuggingfaceHubIntegration(Integration): identifier = "huggingface_hub" origin = f"auto.ai.{identifier}" - def __init__(self, include_prompts=True): - # type: (HuggingfaceHubIntegration, bool) -> None + def __init__(self: HuggingfaceHubIntegration, include_prompts: bool = True) -> None: self.include_prompts = include_prompts @staticmethod - def setup_once(): - # type: () -> None + def setup_once() -> None: huggingface_hub.inference._client.InferenceClient.text_generation = ( _wrap_text_generation( huggingface_hub.inference._client.InferenceClient.text_generation @@ -41,8 +40,7 @@ def setup_once(): ) -def _capture_exception(exc): - # type: (Any) -> None +def _capture_exception(exc: Any) -> None: event, hint = event_from_exception( exc, client_options=sentry_sdk.get_client().options, @@ -51,11 +49,9 @@ def _capture_exception(exc): sentry_sdk.capture_event(event, hint=hint) -def _wrap_text_generation(f): - # type: (Callable[..., Any]) -> Callable[..., Any] +def _wrap_text_generation(f: Callable[..., Any]) -> Callable[..., Any]: @wraps(f) - def new_text_generation(*args, **kwargs): - # type: (*Any, **Any) -> Any + def new_text_generation(*args: Any, **kwargs: Any) -> Any: integration = sentry_sdk.get_client().get_integration(HuggingfaceHubIntegration) if integration is None: return f(*args, **kwargs) @@ -77,6 +73,7 @@ def new_text_generation(*args, **kwargs): op=consts.OP.HUGGINGFACE_HUB_CHAT_COMPLETIONS_CREATE, name="Text Generation", origin=HuggingfaceHubIntegration.origin, + only_as_child_span=True, ) span.__enter__() try: @@ -126,8 +123,7 @@ def new_text_generation(*args, **kwargs): if kwargs.get("details", False): # res is Iterable[TextGenerationStreamOutput] - def new_details_iterator(): - # type: () -> Iterable[ChatCompletionStreamOutput] + def new_details_iterator() -> Iterable[ChatCompletionStreamOutput]: with capture_internal_exceptions(): tokens_used = 0 data_buf: list[str] = [] @@ -158,8 +154,7 @@ def new_details_iterator(): else: # res is Iterable[str] - def new_iterator(): - # type: () -> Iterable[str] + def new_iterator() -> Iterable[str]: data_buf: list[str] = [] with capture_internal_exceptions(): for s in res: diff --git a/sentry_sdk/integrations/langchain.py b/sentry_sdk/integrations/langchain.py index 8b67c4c994..fb7d714252 100644 --- a/sentry_sdk/integrations/langchain.py +++ b/sentry_sdk/integrations/langchain.py @@ -1,10 +1,11 @@ +from __future__ import annotations import itertools from collections import OrderedDict from functools import wraps import sentry_sdk from sentry_sdk.ai.monitoring import set_ai_pipeline_name, record_token_usage -from sentry_sdk.consts import OP, SPANDATA +from sentry_sdk.consts import OP, SPANDATA, SPANSTATUS from sentry_sdk.ai.utils import set_data_normalized from sentry_sdk.scope import should_send_default_pii from sentry_sdk.tracing import Span @@ -61,38 +62,41 @@ class LangchainIntegration(Integration): max_spans = 1024 def __init__( - self, include_prompts=True, max_spans=1024, tiktoken_encoding_name=None - ): - # type: (LangchainIntegration, bool, int, Optional[str]) -> None + self: LangchainIntegration, + include_prompts: bool = True, + max_spans: int = 1024, + tiktoken_encoding_name: Optional[str] = None, + ) -> None: self.include_prompts = include_prompts self.max_spans = max_spans self.tiktoken_encoding_name = tiktoken_encoding_name @staticmethod - def setup_once(): - # type: () -> None + def setup_once() -> None: manager._configure = _wrap_configure(manager._configure) class WatchedSpan: - span = None # type: Span - 
num_completion_tokens = 0 # type: int - num_prompt_tokens = 0 # type: int - no_collect_tokens = False # type: bool - children = [] # type: List[WatchedSpan] - is_pipeline = False # type: bool - - def __init__(self, span): - # type: (Span) -> None + num_completion_tokens: int = 0 + num_prompt_tokens: int = 0 + no_collect_tokens: bool = False + children: List[WatchedSpan] = [] + is_pipeline: bool = False + + def __init__(self, span: Span) -> None: self.span = span class SentryLangchainCallback(BaseCallbackHandler): # type: ignore[misc] """Base callback handler that can be used to handle callbacks from langchain.""" - def __init__(self, max_span_map_size, include_prompts, tiktoken_encoding_name=None): - # type: (int, bool, Optional[str]) -> None - self.span_map = OrderedDict() # type: OrderedDict[UUID, WatchedSpan] + def __init__( + self, + max_span_map_size: int, + include_prompts: bool, + tiktoken_encoding_name: Optional[str] = None, + ) -> None: + self.span_map: OrderedDict[UUID, WatchedSpan] = OrderedDict() self.max_span_map_size = max_span_map_size self.include_prompts = include_prompts @@ -102,80 +106,88 @@ def __init__(self, max_span_map_size, include_prompts, tiktoken_encoding_name=No self.tiktoken_encoding = tiktoken.get_encoding(tiktoken_encoding_name) - def count_tokens(self, s): - # type: (str) -> int + def count_tokens(self, s: str) -> int: if self.tiktoken_encoding is not None: return len(self.tiktoken_encoding.encode_ordinary(s)) return 0 - def gc_span_map(self): - # type: () -> None + def gc_span_map(self) -> None: while len(self.span_map) > self.max_span_map_size: run_id, watched_span = self.span_map.popitem(last=False) self._exit_span(watched_span, run_id) - def _handle_error(self, run_id, error): - # type: (UUID, Any) -> None + def _handle_error(self, run_id: UUID, error: Any) -> None: if not run_id or run_id not in self.span_map: return span_data = self.span_map[run_id] if not span_data: return - sentry_sdk.capture_exception(error, span_data.span.scope) - span_data.span.__exit__(None, None, None) + sentry_sdk.capture_exception(error) + span_data.span.set_status(SPANSTATUS.INTERNAL_ERROR) + span_data.span.finish() del self.span_map[run_id] - def _normalize_langchain_message(self, message): - # type: (BaseMessage) -> Any + def _normalize_langchain_message(self, message: BaseMessage) -> Any: parsed = {"content": message.content, "role": message.type} parsed.update(message.additional_kwargs) return parsed - def _create_span(self, run_id, parent_id, **kwargs): - # type: (SentryLangchainCallback, UUID, Optional[Any], Any) -> WatchedSpan - - watched_span = None # type: Optional[WatchedSpan] - if parent_id: - parent_span = self.span_map.get(parent_id) # type: Optional[WatchedSpan] - if parent_span: - watched_span = WatchedSpan(parent_span.span.start_child(**kwargs)) - parent_span.children.append(watched_span) - if watched_span is None: - watched_span = WatchedSpan(sentry_sdk.start_span(**kwargs)) + def _create_span( + self: SentryLangchainCallback, + run_id: UUID, + parent_id: Optional[Any], + **kwargs: Any, + ) -> WatchedSpan: + + parent_watched_span = self.span_map.get(parent_id) if parent_id else None + sentry_span = sentry_sdk.start_span( + parent_span=parent_watched_span.span if parent_watched_span else None, + only_as_child_span=True, + **kwargs, + ) + watched_span = WatchedSpan(sentry_span) + if parent_watched_span: + parent_watched_span.children.append(watched_span) if kwargs.get("op", "").startswith("ai.pipeline."): if kwargs.get("name"): 
set_ai_pipeline_name(kwargs.get("name")) watched_span.is_pipeline = True - watched_span.span.__enter__() + # Langchain seems to reuse the same run_id for the pipeline, + # so end the older span here to avoid orphan spans + existing_span_data = self.span_map.get(run_id) + if existing_span_data is not None: + self._exit_span(existing_span_data, run_id) + self.span_map[run_id] = watched_span self.gc_span_map() return watched_span - def _exit_span(self, span_data, run_id): - # type: (SentryLangchainCallback, WatchedSpan, UUID) -> None + def _exit_span( + self: SentryLangchainCallback, span_data: WatchedSpan, run_id: UUID + ) -> None: if span_data.is_pipeline: set_ai_pipeline_name(None) - span_data.span.__exit__(None, None, None) + span_data.span.set_status(SPANSTATUS.OK) + span_data.span.finish() del self.span_map[run_id] def on_llm_start( - self, - serialized, - prompts, + self: SentryLangchainCallback, + serialized: Dict[str, Any], + prompts: List[str], *, - run_id, - tags=None, - parent_run_id=None, - metadata=None, - **kwargs, - ): - # type: (SentryLangchainCallback, Dict[str, Any], List[str], UUID, Optional[List[str]], Optional[UUID], Optional[Dict[str, Any]], Any) -> Any + run_id: UUID, + tags: Optional[List[str]] = None, + parent_run_id: Optional[UUID] = None, + metadata: Optional[Dict[str, Any]] = None, + **kwargs: Any, + ) -> Any: """Run when LLM starts running.""" with capture_internal_exceptions(): if not run_id: return @@ -196,8 +208,14 @@ def on_llm_start( if k in all_params: set_data_normalized(span, v, all_params[k]) - def on_chat_model_start(self, serialized, messages, *, run_id, **kwargs): - # type: (SentryLangchainCallback, Dict[str, Any], List[List[BaseMessage]], UUID, Any) -> Any + def on_chat_model_start( + self: SentryLangchainCallback, + serialized: Dict[str, Any], + messages: List[List[BaseMessage]], + *, + run_id: UUID, + **kwargs: Any, + ) -> Any: """Run when Chat Model starts running.""" with capture_internal_exceptions(): if not run_id: return @@ -222,7 +240,7 @@ def on_chat_model_start(self, serialized, messages, *, run_id, **kwargs): if not model and "anthropic" in all_params.get("_type"): model = "claude-2" if model: - span.set_data(SPANDATA.AI_MODEL_ID, model) + span.set_attribute(SPANDATA.AI_MODEL_ID, model) if should_send_default_pii() and self.include_prompts: set_data_normalized( span, @@ -242,8 +260,9 @@ def on_chat_model_start(self, serialized, messages, *, run_id, **kwargs): message.content ) + self.count_tokens(message.type) - def on_llm_new_token(self, token, *, run_id, **kwargs): - # type: (SentryLangchainCallback, str, UUID, Any) -> Any + def on_llm_new_token( + self: SentryLangchainCallback, token: str, *, run_id: UUID, **kwargs: Any + ) -> Any: """Run on new LLM token. 
Only available when streaming is enabled.""" with capture_internal_exceptions(): if not run_id or run_id not in self.span_map: @@ -253,8 +272,13 @@ def on_llm_new_token(self, token, *, run_id, **kwargs): return span_data.num_completion_tokens += self.count_tokens(token) - def on_llm_end(self, response, *, run_id, **kwargs): - # type: (SentryLangchainCallback, LLMResult, UUID, Any) -> Any + def on_llm_end( + self: SentryLangchainCallback, + response: LLMResult, + *, + run_id: UUID, + **kwargs: Any, + ) -> Any: """Run when LLM ends running.""" with capture_internal_exceptions(): if not run_id: @@ -292,14 +316,25 @@ def on_llm_end(self, response, *, run_id, **kwargs): self._exit_span(span_data, run_id) - def on_llm_error(self, error, *, run_id, **kwargs): - # type: (SentryLangchainCallback, Union[Exception, KeyboardInterrupt], UUID, Any) -> Any + def on_llm_error( + self: SentryLangchainCallback, + error: Union[Exception, KeyboardInterrupt], + *, + run_id: UUID, + **kwargs: Any, + ) -> Any: """Run when LLM errors.""" with capture_internal_exceptions(): self._handle_error(run_id, error) - def on_chain_start(self, serialized, inputs, *, run_id, **kwargs): - # type: (SentryLangchainCallback, Dict[str, Any], Dict[str, Any], UUID, Any) -> Any + def on_chain_start( + self: SentryLangchainCallback, + serialized: Dict[str, Any], + inputs: Dict[str, Any], + *, + run_id: UUID, + **kwargs: Any, + ) -> Any: """Run when chain starts running.""" with capture_internal_exceptions(): if not run_id: @@ -319,8 +354,13 @@ def on_chain_start(self, serialized, inputs, *, run_id, **kwargs): if metadata: set_data_normalized(watched_span.span, SPANDATA.AI_METADATA, metadata) - def on_chain_end(self, outputs, *, run_id, **kwargs): - # type: (SentryLangchainCallback, Dict[str, Any], UUID, Any) -> Any + def on_chain_end( + self: SentryLangchainCallback, + outputs: Dict[str, Any], + *, + run_id: UUID, + **kwargs: Any, + ) -> Any: """Run when chain ends running.""" with capture_internal_exceptions(): if not run_id or run_id not in self.span_map: @@ -331,13 +371,23 @@ def on_chain_end(self, outputs, *, run_id, **kwargs): return self._exit_span(span_data, run_id) - def on_chain_error(self, error, *, run_id, **kwargs): - # type: (SentryLangchainCallback, Union[Exception, KeyboardInterrupt], UUID, Any) -> Any + def on_chain_error( + self: SentryLangchainCallback, + error: Union[Exception, KeyboardInterrupt], + *, + run_id: UUID, + **kwargs: Any, + ) -> Any: """Run when chain errors.""" self._handle_error(run_id, error) - def on_agent_action(self, action, *, run_id, **kwargs): - # type: (SentryLangchainCallback, AgentAction, UUID, Any) -> Any + def on_agent_action( + self: SentryLangchainCallback, + action: AgentAction, + *, + run_id: UUID, + **kwargs: Any, + ) -> Any: with capture_internal_exceptions(): if not run_id: return @@ -353,8 +403,13 @@ def on_agent_action(self, action, *, run_id, **kwargs): watched_span.span, SPANDATA.AI_INPUT_MESSAGES, action.tool_input ) - def on_agent_finish(self, finish, *, run_id, **kwargs): - # type: (SentryLangchainCallback, AgentFinish, UUID, Any) -> Any + def on_agent_finish( + self: SentryLangchainCallback, + finish: AgentFinish, + *, + run_id: UUID, + **kwargs: Any, + ) -> Any: with capture_internal_exceptions(): if not run_id: return @@ -368,8 +423,14 @@ def on_agent_finish(self, finish, *, run_id, **kwargs): ) self._exit_span(span_data, run_id) - def on_tool_start(self, serialized, input_str, *, run_id, **kwargs): - # type: (SentryLangchainCallback, Dict[str, Any], str, UUID, Any) -> 
Any + def on_tool_start( + self: SentryLangchainCallback, + serialized: Dict[str, Any], + input_str: str, + *, + run_id: UUID, + **kwargs: Any, + ) -> Any: """Run when tool starts running.""" with capture_internal_exceptions(): if not run_id: return @@ -392,8 +453,9 @@ def on_tool_start(self, serialized, input_str, *, run_id, **kwargs): watched_span.span, SPANDATA.AI_METADATA, kwargs.get("metadata") ) - def on_tool_end(self, output, *, run_id, **kwargs): - # type: (SentryLangchainCallback, str, UUID, Any) -> Any + def on_tool_end( + self: SentryLangchainCallback, output: str, *, run_id: UUID, **kwargs: Any + ) -> Any: """Run when tool ends running.""" with capture_internal_exceptions(): if not run_id or run_id not in self.span_map: @@ -406,24 +468,27 @@ def on_tool_end(self, output, *, run_id, **kwargs): set_data_normalized(span_data.span, SPANDATA.AI_RESPONSES, output) self._exit_span(span_data, run_id) - def on_tool_error(self, error, *args, run_id, **kwargs): - # type: (SentryLangchainCallback, Union[Exception, KeyboardInterrupt], UUID, Any) -> Any + def on_tool_error( + self: SentryLangchainCallback, + error: Union[Exception, KeyboardInterrupt], + *args: Any, + run_id: UUID, + **kwargs: Any, + ) -> Any: """Run when tool errors.""" self._handle_error(run_id, error) -def _wrap_configure(f): - # type: (Callable[..., Any]) -> Callable[..., Any] +def _wrap_configure(f: Callable[..., Any]) -> Callable[..., Any]: @wraps(f) def new_configure( - callback_manager_cls, # type: type - inheritable_callbacks=None, # type: Callbacks - local_callbacks=None, # type: Callbacks - *args, # type: Any - **kwargs, # type: Any - ): - # type: (...) -> Any + callback_manager_cls: type, + inheritable_callbacks: Callbacks = None, + local_callbacks: Callbacks = None, + *args: Any, + **kwargs: Any, + ) -> Any: integration = sentry_sdk.get_client().get_integration(LangchainIntegration) if integration is None: diff --git a/sentry_sdk/integrations/launchdarkly.py b/sentry_sdk/integrations/launchdarkly.py index d3c423e7be..18081e617a 100644 --- a/sentry_sdk/integrations/launchdarkly.py +++ b/sentry_sdk/integrations/launchdarkly.py @@ -1,3 +1,4 @@ +from __future__ import annotations from typing import TYPE_CHECKING from sentry_sdk.feature_flags import add_feature_flag @@ -20,8 +21,7 @@ class LaunchDarklyIntegration(Integration): identifier = "launchdarkly" - def __init__(self, ld_client=None): - # type: (LDClient | None) -> None + def __init__(self, ld_client: LDClient | None = None) -> None: """ :param client: An initialized LDClient instance. If a client is not provided, this integration will attempt to use the shared global instance. 
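Since `ld_client` is optional, enabling the integration can be as simple as initializing the SDK. A minimal sketch under that assumption (the DSN is a placeholder; `LaunchDarklyIntegration` and its `ld_client` parameter come from the patch above):

    import sentry_sdk
    from sentry_sdk.integrations.launchdarkly import LaunchDarklyIntegration

    # Relies on LaunchDarkly's shared global client instance by default:
    sentry_sdk.init(
        dsn="https://examplePublicKey@o0.ingest.sentry.io/0",
        integrations=[LaunchDarklyIntegration()],
    )

    # Alternatively, pass an initialized client explicitly, e.g.
    # LaunchDarklyIntegration(ld_client=ldclient.get()) after ldclient.set_config(...).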
@@ -38,25 +38,28 @@ def __init__(self, ld_client=None): client.add_hook(LaunchDarklyHook()) @staticmethod - def setup_once(): - # type: () -> None + def setup_once() -> None: pass class LaunchDarklyHook(Hook): @property - def metadata(self): - # type: () -> Metadata + def metadata(self) -> Metadata: return Metadata(name="sentry-flag-auditor") - def after_evaluation(self, series_context, data, detail): - # type: (EvaluationSeriesContext, dict[Any, Any], EvaluationDetail) -> dict[Any, Any] + def after_evaluation( + self, + series_context: EvaluationSeriesContext, + data: dict[Any, Any], + detail: EvaluationDetail, + ) -> dict[Any, Any]: if isinstance(detail.value, bool): add_feature_flag(series_context.key, detail.value) return data - def before_evaluation(self, series_context, data): - # type: (EvaluationSeriesContext, dict[Any, Any]) -> dict[Any, Any] + def before_evaluation( + self, series_context: EvaluationSeriesContext, data: dict[Any, Any] + ) -> dict[Any, Any]: return data # No-op. diff --git a/sentry_sdk/integrations/litestar.py b/sentry_sdk/integrations/litestar.py index 4e15081cba..a857f9612b 100644 --- a/sentry_sdk/integrations/litestar.py +++ b/sentry_sdk/integrations/litestar.py @@ -1,6 +1,7 @@ +from __future__ import annotations from collections.abc import Set import sentry_sdk -from sentry_sdk.consts import OP +from sentry_sdk.consts import OP, TransactionSource, SOURCE_FOR_STYLE from sentry_sdk.integrations import ( _DEFAULT_FAILED_REQUEST_STATUS_CODES, DidNotEnable, @@ -9,7 +10,6 @@ from sentry_sdk.integrations.asgi import SentryAsgiMiddleware from sentry_sdk.integrations.logging import ignore_logger from sentry_sdk.scope import should_send_default_pii -from sentry_sdk.tracing import TransactionSource, SOURCE_FOR_STYLE from sentry_sdk.utils import ( ensure_integration_enabled, event_from_exception, @@ -53,13 +53,12 @@ class LitestarIntegration(Integration): def __init__( self, - failed_request_status_codes=_DEFAULT_FAILED_REQUEST_STATUS_CODES, # type: Set[int] + failed_request_status_codes: Set[int] = _DEFAULT_FAILED_REQUEST_STATUS_CODES, ) -> None: self.failed_request_status_codes = failed_request_status_codes @staticmethod - def setup_once(): - # type: () -> None + def setup_once() -> None: patch_app_init() patch_middlewares() patch_http_route_handle() @@ -76,8 +75,9 @@ def setup_once(): class SentryLitestarASGIMiddleware(SentryAsgiMiddleware): - def __init__(self, app, span_origin=LitestarIntegration.origin): - # type: (ASGIApp, str) -> None + def __init__( + self, app: ASGIApp, span_origin: str = LitestarIntegration.origin + ) -> None: super().__init__( app=app, @@ -87,8 +87,7 @@ def __init__(self, app, span_origin=LitestarIntegration.origin): span_origin=span_origin, ) - def _capture_request_exception(self, exc): - # type: (Exception) -> None + def _capture_request_exception(self, exc: Exception) -> None: """Avoid catching exceptions from request handlers. Those exceptions are already handled in Litestar.after_exception handler. @@ -97,8 +96,7 @@ def _capture_request_exception(self, exc): pass -def patch_app_init(): - # type: () -> None +def patch_app_init() -> None: """ Replaces the Litestar class's `__init__` function in order to inject `after_exception` handlers and set the `SentryLitestarASGIMiddleware` as the outmost middleware in the stack. 
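Because `Litestar.__init__` itself is wrapped, users only need to initialize the SDK before constructing the app; no manual middleware registration is required. A rough sketch of that flow (placeholder DSN; `failed_request_status_codes` is the option shown in the hunk above):

    import sentry_sdk
    from litestar import Litestar, get
    from sentry_sdk.integrations.litestar import LitestarIntegration

    sentry_sdk.init(
        dsn="https://examplePublicKey@o0.ingest.sentry.io/0",
        integrations=[LitestarIntegration(failed_request_status_codes={500, 503})],
    )

    @get("/")
    async def index() -> str:
        return "ok"

    # The patched __init__ prepends Sentry's after_exception handler and
    # installs SentryLitestarASGIMiddleware as the outermost middleware.
    app = Litestar(route_handlers=[index])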
@@ -109,8 +107,7 @@ def patch_app_init(): old__init__ = Litestar.__init__ @ensure_integration_enabled(LitestarIntegration, old__init__) - def injection_wrapper(self, *args, **kwargs): - # type: (Litestar, *Any, **Any) -> None + def injection_wrapper(self: Litestar, *args: Any, **kwargs: Any) -> None: kwargs["after_exception"] = [ exception_handler, *(kwargs.get("after_exception") or []), @@ -124,13 +121,11 @@ def injection_wrapper(self, *args, **kwargs): Litestar.__init__ = injection_wrapper -def patch_middlewares(): - # type: () -> None +def patch_middlewares() -> None: old_resolve_middleware_stack = BaseRouteHandler.resolve_middleware @ensure_integration_enabled(LitestarIntegration, old_resolve_middleware_stack) - def resolve_middleware_wrapper(self): - # type: (BaseRouteHandler) -> list[Middleware] + def resolve_middleware_wrapper(self: BaseRouteHandler) -> list[Middleware]: return [ enable_span_for_middleware(middleware) for middleware in old_resolve_middleware_stack(self) @@ -139,8 +134,7 @@ def resolve_middleware_wrapper(self): BaseRouteHandler.resolve_middleware = resolve_middleware_wrapper -def enable_span_for_middleware(middleware): - # type: (Middleware) -> Middleware +def enable_span_for_middleware(middleware: Middleware) -> Middleware: if ( not hasattr(middleware, "__call__") # noqa: B004 or middleware is SentryLitestarASGIMiddleware @@ -148,12 +142,13 @@ def enable_span_for_middleware(middleware): return middleware if isinstance(middleware, DefineMiddleware): - old_call = middleware.middleware.__call__ # type: ASGIApp + old_call: ASGIApp = middleware.middleware.__call__ else: old_call = middleware.__call__ - async def _create_span_call(self, scope, receive, send): - # type: (MiddlewareProtocol, LitestarScope, Receive, Send) -> None + async def _create_span_call( + self: MiddlewareProtocol, scope: LitestarScope, receive: Receive, send: Send + ) -> None: if sentry_sdk.get_client().get_integration(LitestarIntegration) is None: return await old_call(self, scope, receive, send) @@ -162,18 +157,21 @@ async def _create_span_call(self, scope, receive, send): op=OP.MIDDLEWARE_LITESTAR, name=middleware_name, origin=LitestarIntegration.origin, + only_as_child_span=True, ) as middleware_span: middleware_span.set_tag("litestar.middleware_name", middleware_name) # Creating spans for the "receive" callback - async def _sentry_receive(*args, **kwargs): - # type: (*Any, **Any) -> Union[HTTPReceiveMessage, WebSocketReceiveMessage] + async def _sentry_receive( + *args: Any, **kwargs: Any + ) -> Union[HTTPReceiveMessage, WebSocketReceiveMessage]: if sentry_sdk.get_client().get_integration(LitestarIntegration) is None: return await receive(*args, **kwargs) with sentry_sdk.start_span( op=OP.MIDDLEWARE_LITESTAR_RECEIVE, name=getattr(receive, "__qualname__", str(receive)), origin=LitestarIntegration.origin, + only_as_child_span=True, ) as span: span.set_tag("litestar.middleware_name", middleware_name) return await receive(*args, **kwargs) @@ -183,14 +181,14 @@ async def _sentry_receive(*args, **kwargs): new_receive = _sentry_receive if not receive_patched else receive # Creating spans for the "send" callback - async def _sentry_send(message): - # type: (Message) -> None + async def _sentry_send(message: Message) -> None: if sentry_sdk.get_client().get_integration(LitestarIntegration) is None: return await send(message) with sentry_sdk.start_span( op=OP.MIDDLEWARE_LITESTAR_SEND, name=getattr(send, "__qualname__", str(send)), origin=LitestarIntegration.origin, + only_as_child_span=True, ) as span: 
span.set_tag("litestar.middleware_name", middleware_name) return await send(message) @@ -212,19 +210,19 @@ async def _sentry_send(message): return middleware -def patch_http_route_handle(): - # type: () -> None +def patch_http_route_handle() -> None: old_handle = HTTPRoute.handle - async def handle_wrapper(self, scope, receive, send): - # type: (HTTPRoute, HTTPScope, Receive, Send) -> None + async def handle_wrapper( + self: HTTPRoute, scope: HTTPScope, receive: Receive, send: Send + ) -> None: if sentry_sdk.get_client().get_integration(LitestarIntegration) is None: return await old_handle(self, scope, receive, send) sentry_scope = sentry_sdk.get_isolation_scope() - request = scope["app"].request_class( + request: Request[Any, Any] = scope["app"].request_class( scope=scope, receive=receive, send=send - ) # type: Request[Any, Any] + ) extracted_request_data = ConnectionDataExtractor( parse_body=True, parse_query=True )(request) @@ -232,8 +230,7 @@ async def handle_wrapper(self, scope, receive, send): request_data = await body - def event_processor(event, _): - # type: (Event, Hint) -> Event + def event_processor(event: Event, _: Hint) -> Event: route_handler = scope.get("route_handler") request_info = event.get("request", {}) @@ -277,8 +274,7 @@ def event_processor(event, _): HTTPRoute.handle = handle_wrapper -def retrieve_user_from_scope(scope): - # type: (LitestarScope) -> Optional[dict[str, Any]] +def retrieve_user_from_scope(scope: LitestarScope) -> Optional[dict[str, Any]]: scope_user = scope.get("user") if isinstance(scope_user, dict): return scope_user @@ -289,9 +285,8 @@ def retrieve_user_from_scope(scope): @ensure_integration_enabled(LitestarIntegration) -def exception_handler(exc, scope): - # type: (Exception, LitestarScope) -> None - user_info = None # type: Optional[dict[str, Any]] +def exception_handler(exc: Exception, scope: LitestarScope) -> None: + user_info: Optional[dict[str, Any]] = None if should_send_default_pii(): user_info = retrieve_user_from_scope(scope) if user_info and isinstance(user_info, dict): diff --git a/sentry_sdk/integrations/logging.py b/sentry_sdk/integrations/logging.py index 15ff2ed233..5d03be1139 100644 --- a/sentry_sdk/integrations/logging.py +++ b/sentry_sdk/integrations/logging.py @@ -1,3 +1,4 @@ +from __future__ import annotations import logging import sys from datetime import datetime, timezone @@ -12,7 +13,6 @@ event_from_exception, current_stacktrace, capture_internal_exceptions, - has_logs_enabled, ) from sentry_sdk.integrations import Integration @@ -26,7 +26,7 @@ from typing import Optional DEFAULT_LEVEL = logging.INFO -DEFAULT_EVENT_LEVEL = logging.ERROR +DEFAULT_EVENT_LEVEL = None # None means no events are captured LOGGING_TO_EVENT_LEVEL = { logging.NOTSET: "notset", logging.DEBUG: "debug", @@ -55,14 +55,18 @@ # Note: Ignoring by logger name here is better than mucking with thread-locals. # We do not necessarily know whether thread-locals work 100% correctly in the user's environment. _IGNORED_LOGGERS = set( - ["sentry_sdk.errors", "urllib3.connectionpool", "urllib3.connection"] + [ + "sentry_sdk.errors", + "urllib3.connectionpool", + "urllib3.connection", + "opentelemetry.*", + ] ) def ignore_logger( - name, # type: str -): - # type: (...) -> None + name: str, +) -> None: """This disables recording (both in breadcrumbs and as events) calls to a logger of a specific name. Among other uses, many of our integrations use this to prevent their actions being recorded as breadcrumbs. 
Exposed @@ -78,11 +82,10 @@ class LoggingIntegration(Integration): def __init__( self, - level=DEFAULT_LEVEL, - event_level=DEFAULT_EVENT_LEVEL, - sentry_logs_level=DEFAULT_LEVEL, - ): - # type: (Optional[int], Optional[int], Optional[int]) -> None + level: Optional[int] = DEFAULT_LEVEL, + event_level: Optional[int] = DEFAULT_EVENT_LEVEL, + sentry_logs_level: Optional[int] = DEFAULT_LEVEL, + ) -> None: self._handler = None self._breadcrumb_handler = None self._sentry_logs_handler = None @@ -96,8 +99,7 @@ def __init__( if event_level is not None: self._handler = EventHandler(level=event_level) - def _handle_record(self, record): - # type: (LogRecord) -> None + def _handle_record(self, record: LogRecord) -> None: if self._handler is not None and record.levelno >= self._handler.level: self._handler.handle(record) @@ -114,12 +116,10 @@ def _handle_record(self, record): self._sentry_logs_handler.handle(record) @staticmethod - def setup_once(): - # type: () -> None + def setup_once() -> None: old_callhandlers = logging.Logger.callHandlers - def sentry_patched_callhandlers(self, record): - # type: (Any, LogRecord) -> Any + def sentry_patched_callhandlers(self: Any, record: LogRecord) -> Any: # keeping a local reference because the # global might be discarded on shutdown ignored_loggers = _IGNORED_LOGGERS @@ -175,22 +175,19 @@ class _BaseHandler(logging.Handler): ) ) - def _can_record(self, record): - # type: (LogRecord) -> bool + def _can_record(self, record: LogRecord) -> bool: """Prevents ignored loggers from recording""" for logger in _IGNORED_LOGGERS: if fnmatch(record.name.strip(), logger): return False return True - def _logging_to_event_level(self, record): - # type: (LogRecord) -> str + def _logging_to_event_level(self, record: LogRecord) -> str: return LOGGING_TO_EVENT_LEVEL.get( record.levelno, record.levelname.lower() if record.levelname else "" ) - def _extra_from_record(self, record): - # type: (LogRecord) -> MutableMapping[str, object] + def _extra_from_record(self, record: LogRecord) -> MutableMapping[str, object]: return { k: v for k, v in vars(record).items() @@ -206,14 +203,12 @@ class EventHandler(_BaseHandler): Note that you do not have to use this class if the logging integration is enabled, which it is by default. """ - def emit(self, record): - # type: (LogRecord) -> Any + def emit(self, record: LogRecord) -> Any: with capture_internal_exceptions(): self.format(record) return self._emit(record) - def _emit(self, record): - # type: (LogRecord) -> None + def _emit(self, record: LogRecord) -> None: if not self._can_record(record): return @@ -300,14 +295,12 @@ class BreadcrumbHandler(_BaseHandler): Note that you do not have to use this class if the logging integration is enabled, which it is by default. 
""" - def emit(self, record): - # type: (LogRecord) -> Any + def emit(self, record: LogRecord) -> Any: with capture_internal_exceptions(): self.format(record) return self._emit(record) - def _emit(self, record): - # type: (LogRecord) -> None + def _emit(self, record: LogRecord) -> None: if not self._can_record(record): return @@ -315,8 +308,7 @@ def _emit(self, record): self._breadcrumb_from_record(record), hint={"log_record": record} ) - def _breadcrumb_from_record(self, record): - # type: (LogRecord) -> Dict[str, Any] + def _breadcrumb_from_record(self, record: LogRecord) -> Dict[str, Any]: return { "type": "log", "level": self._logging_to_event_level(record), @@ -334,8 +326,7 @@ class SentryLogsHandler(_BaseHandler): Note that you do not have to use this class if the logging integration is enabled, which it is by default. """ - def emit(self, record): - # type: (LogRecord) -> Any + def emit(self, record: LogRecord) -> Any: with capture_internal_exceptions(): self.format(record) if not self._can_record(record): @@ -345,18 +336,17 @@ def emit(self, record): if not client.is_active(): return - if not has_logs_enabled(client.options): + if client.options.get("enable_logs") is not True: return self._capture_log_from_record(client, record) - def _capture_log_from_record(self, client, record): - # type: (BaseClient, LogRecord) -> None + def _capture_log_from_record(self, client: BaseClient, record: LogRecord) -> None: otel_severity_number, otel_severity_text = _log_level_to_otel( record.levelno, SEVERITY_TO_OTEL_SEVERITY ) project_root = client.options["project_root"] - attrs = self._extra_from_record(record) # type: Any + attrs: Any = self._extra_from_record(record) attrs["sentry.origin"] = "auto.logger.log" if isinstance(record.msg, str): attrs["sentry.message.template"] = record.msg diff --git a/sentry_sdk/integrations/loguru.py b/sentry_sdk/integrations/loguru.py index b910b9a407..7570e7b744 100644 --- a/sentry_sdk/integrations/loguru.py +++ b/sentry_sdk/integrations/loguru.py @@ -1,3 +1,4 @@ +from __future__ import annotations import enum import sentry_sdk @@ -8,7 +9,6 @@ _BaseHandler, ) from sentry_sdk.logger import _log_level_to_otel -from sentry_sdk.utils import has_logs_enabled from typing import TYPE_CHECKING @@ -66,21 +66,20 @@ class LoggingLevels(enum.IntEnum): class LoguruIntegration(Integration): identifier = "loguru" - level = DEFAULT_LEVEL # type: Optional[int] - event_level = DEFAULT_EVENT_LEVEL # type: Optional[int] + level: Optional[int] = DEFAULT_LEVEL + event_level: Optional[int] = DEFAULT_EVENT_LEVEL breadcrumb_format = DEFAULT_FORMAT event_format = DEFAULT_FORMAT - sentry_logs_level = DEFAULT_LEVEL # type: Optional[int] + sentry_logs_level: Optional[int] = DEFAULT_LEVEL def __init__( self, - level=DEFAULT_LEVEL, - event_level=DEFAULT_EVENT_LEVEL, - breadcrumb_format=DEFAULT_FORMAT, - event_format=DEFAULT_FORMAT, - sentry_logs_level=DEFAULT_LEVEL, - ): - # type: (Optional[int], Optional[int], str | loguru.FormatFunction, str | loguru.FormatFunction, Optional[int]) -> None + level: Optional[int] = DEFAULT_LEVEL, + event_level: Optional[int] = DEFAULT_EVENT_LEVEL, + breadcrumb_format: str | loguru.FormatFunction = DEFAULT_FORMAT, + event_format: str | loguru.FormatFunction = DEFAULT_FORMAT, + sentry_logs_level: Optional[int] = DEFAULT_LEVEL, + ) -> None: LoguruIntegration.level = level LoguruIntegration.event_level = event_level LoguruIntegration.breadcrumb_format = breadcrumb_format @@ -88,8 +87,7 @@ def __init__( LoguruIntegration.sentry_logs_level = sentry_logs_level 
@staticmethod - def setup_once(): - # type: () -> None + def setup_once() -> None: if LoguruIntegration.level is not None: logger.add( LoguruBreadcrumbHandler(level=LoguruIntegration.level), @@ -112,8 +110,7 @@ def setup_once(): class _LoguruBaseHandler(_BaseHandler): - def __init__(self, *args, **kwargs): - # type: (*Any, **Any) -> None + def __init__(self, *args: Any, **kwargs: Any) -> None: if kwargs.get("level"): kwargs["level"] = SENTRY_LEVEL_FROM_LOGURU_LEVEL.get( kwargs.get("level", ""), DEFAULT_LEVEL @@ -121,8 +118,7 @@ def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) - def _logging_to_event_level(self, record): - # type: (LogRecord) -> str + def _logging_to_event_level(self, record: LogRecord) -> str: try: return SENTRY_LEVEL_FROM_LOGURU_LEVEL[ LoggingLevels(record.levelno).name @@ -143,8 +139,7 @@ class LoguruBreadcrumbHandler(_LoguruBaseHandler, BreadcrumbHandler): pass -def loguru_sentry_logs_handler(message): - # type: (Message) -> None +def loguru_sentry_logs_handler(message: Message) -> None: # This is intentionally a callable sink instead of a standard logging handler # since otherwise we wouldn't get direct access to message.record client = sentry_sdk.get_client() @@ -152,7 +147,7 @@ def loguru_sentry_logs_handler(message): if not client.is_active(): return - if not has_logs_enabled(client.options): + if client.options.get("enable_logs") is not True: return record = message.record @@ -167,7 +162,7 @@ def loguru_sentry_logs_handler(message): record["level"].no, SEVERITY_TO_OTEL_SEVERITY ) - attrs = {"sentry.origin": "auto.logger.loguru"} # type: dict[str, Any] + attrs: dict[str, Any] = {"sentry.origin": "auto.logger.loguru"} project_root = client.options["project_root"] if record.get("file"): diff --git a/sentry_sdk/integrations/modules.py b/sentry_sdk/integrations/modules.py index ce3ee78665..a289ce1989 100644 --- a/sentry_sdk/integrations/modules.py +++ b/sentry_sdk/integrations/modules.py @@ -1,3 +1,4 @@ +from __future__ import annotations import sentry_sdk from sentry_sdk.integrations import Integration from sentry_sdk.scope import add_global_event_processor @@ -14,11 +15,9 @@ class ModulesIntegration(Integration): identifier = "modules" @staticmethod - def setup_once(): - # type: () -> None + def setup_once() -> None: @add_global_event_processor - def processor(event, hint): - # type: (Event, Any) -> Event + def processor(event: Event, hint: Any) -> Event: if event.get("type") == "transaction": return event diff --git a/sentry_sdk/integrations/openai.py b/sentry_sdk/integrations/openai.py index 187f795807..dc9f5a0172 100644 --- a/sentry_sdk/integrations/openai.py +++ b/sentry_sdk/integrations/openai.py @@ -1,3 +1,4 @@ +from __future__ import annotations from functools import wraps import sentry_sdk @@ -46,8 +47,11 @@ class OpenAIIntegration(Integration): identifier = "openai" origin = f"auto.ai.{identifier}" - def __init__(self, include_prompts=True, tiktoken_encoding_name=None): - # type: (OpenAIIntegration, bool, Optional[str]) -> None + def __init__( + self: OpenAIIntegration, + include_prompts: bool = True, + tiktoken_encoding_name: Optional[str] = None, + ) -> None: self.include_prompts = include_prompts self.tiktoken_encoding = None @@ -57,8 +61,7 @@ def __init__(self, include_prompts=True, tiktoken_encoding_name=None): self.tiktoken_encoding = tiktoken.get_encoding(tiktoken_encoding_name) @staticmethod - def setup_once(): - # type: () -> None + def setup_once() -> None: Completions.create = _wrap_chat_completion_create(Completions.create) 
AsyncCompletions.create = _wrap_async_chat_completion_create( AsyncCompletions.create @@ -71,15 +74,13 @@ def setup_once(): Responses.create = _wrap_responses_create(Responses.create) AsyncResponses.create = _wrap_async_responses_create(AsyncResponses.create) - def count_tokens(self, s): - # type: (OpenAIIntegration, str) -> int + def count_tokens(self: OpenAIIntegration, s: str) -> int: if self.tiktoken_encoding is not None: return len(self.tiktoken_encoding.encode_ordinary(s)) return 0 -def _capture_exception(exc): - # type: (Any) -> None +def _capture_exception(exc: Any) -> None: # Close an eventually open span # We need to do this by hand because we are not using the start_span context manager current_span = sentry_sdk.get_current_span() @@ -94,8 +95,7 @@ def _capture_exception(exc): sentry_sdk.capture_event(event, hint=hint) -def _get_usage(usage, names): - # type: (Any, List[str]) -> int +def _get_usage(usage: Any, names: List[str]) -> int: for name in names: if hasattr(usage, name) and isinstance(getattr(usage, name), int): return getattr(usage, name) @@ -103,14 +103,17 @@ def _get_usage(usage, names): def _calculate_token_usage( - messages, response, span, streaming_message_responses, count_tokens -): - # type: (Optional[Iterable[ChatCompletionMessageParam]], Any, Span, Optional[List[str]], Callable[..., Any]) -> None - input_tokens = 0 # type: Optional[int] - input_tokens_cached = 0 # type: Optional[int] - output_tokens = 0 # type: Optional[int] - output_tokens_reasoning = 0 # type: Optional[int] - total_tokens = 0 # type: Optional[int] + messages: Optional[Iterable[ChatCompletionMessageParam]], + response: Any, + span: Span, + streaming_message_responses: Optional[List[str]], + count_tokens: Callable[..., Any], +) -> None: + input_tokens: Optional[int] = 0 + input_tokens_cached: Optional[int] = 0 + output_tokens: Optional[int] = 0 + output_tokens_reasoning: Optional[int] = 0 + total_tokens: Optional[int] = 0 if hasattr(response, "usage"): input_tokens = _get_usage(response.usage, ["input_tokens", "prompt_tokens"]) @@ -163,8 +166,9 @@ def _calculate_token_usage( ) -def _set_input_data(span, kwargs, operation, integration): - # type: (Span, dict[str, Any], str, OpenAIIntegration) -> None +def _set_input_data( + span: Span, kwargs: dict[str, Any], operation: str, integration: OpenAIIntegration +) -> None: # Input messages (the prompt or data sent to the model) messages = kwargs.get("messages") if messages is None: @@ -209,8 +213,13 @@ def _set_input_data(span, kwargs, operation, integration): ) -def _set_output_data(span, response, kwargs, integration, finish_span=True): - # type: (Span, Any, dict[str, Any], OpenAIIntegration, bool) -> None +def _set_output_data( + span: Span, + response: Any, + kwargs: dict[str, Any], + integration: OpenAIIntegration, + finish_span: bool = True, +) -> None: if hasattr(response, "model"): set_data_normalized(span, SPANDATA.GEN_AI_RESPONSE_MODEL, response.model) @@ -254,8 +263,7 @@ def _set_output_data(span, response, kwargs, integration, finish_span=True): old_iterator = response._iterator - def new_iterator(): - # type: () -> Iterator[ChatCompletionChunk] + def new_iterator() -> Iterator[ChatCompletionChunk]: with capture_internal_exceptions(): count_tokens_manually = True for x in old_iterator: @@ -309,8 +317,7 @@ def new_iterator(): if finish_span: span.__exit__(None, None, None) - async def new_iterator_async(): - # type: () -> AsyncIterator[ChatCompletionChunk] + async def new_iterator_async() -> AsyncIterator[ChatCompletionChunk]: with 
capture_internal_exceptions(): count_tokens_manually = True async for x in old_iterator: @@ -373,8 +380,7 @@ async def new_iterator_async(): span.__exit__(None, None, None) -def _new_chat_completion_common(f, *args, **kwargs): - # type: (Any, Any, Any) -> Any +def _new_chat_completion_common(f: Any, *args: Any, **kwargs: Any) -> Any: integration = sentry_sdk.get_client().get_integration(OpenAIIntegration) if integration is None: return f(*args, **kwargs) @@ -408,10 +414,8 @@ def _new_chat_completion_common(f, *args, **kwargs): return response -def _wrap_chat_completion_create(f): - # type: (Callable[..., Any]) -> Callable[..., Any] - def _execute_sync(f, *args, **kwargs): - # type: (Any, Any, Any) -> Any +def _wrap_chat_completion_create(f: Callable[..., Any]) -> Callable[..., Any]: + def _execute_sync(f: Any, *args: Any, **kwargs: Any) -> Any: gen = _new_chat_completion_common(f, *args, **kwargs) try: @@ -431,8 +435,7 @@ def _execute_sync(f, *args, **kwargs): return e.value @wraps(f) - def _sentry_patched_create_sync(*args, **kwargs): - # type: (Any, Any) -> Any + def _sentry_patched_create_sync(*args: Any, **kwargs: Any) -> Any: integration = sentry_sdk.get_client().get_integration(OpenAIIntegration) if integration is None or "messages" not in kwargs: # no "messages" means invalid call (in all versions of openai), let it return error @@ -443,10 +446,8 @@ def _sentry_patched_create_sync(*args, **kwargs): return _sentry_patched_create_sync -def _wrap_async_chat_completion_create(f): - # type: (Callable[..., Any]) -> Callable[..., Any] - async def _execute_async(f, *args, **kwargs): - # type: (Any, Any, Any) -> Any +def _wrap_async_chat_completion_create(f: Callable[..., Any]) -> Callable[..., Any]: + async def _execute_async(f: Any, *args: Any, **kwargs: Any) -> Any: gen = _new_chat_completion_common(f, *args, **kwargs) try: @@ -466,8 +467,7 @@ async def _execute_async(f, *args, **kwargs): return e.value @wraps(f) - async def _sentry_patched_create_async(*args, **kwargs): - # type: (Any, Any) -> Any + async def _sentry_patched_create_async(*args: Any, **kwargs: Any) -> Any: integration = sentry_sdk.get_client().get_integration(OpenAIIntegration) if integration is None or "messages" not in kwargs: # no "messages" means invalid call (in all versions of openai), let it return error @@ -478,8 +478,7 @@ async def _sentry_patched_create_async(*args, **kwargs): return _sentry_patched_create_async -def _new_embeddings_create_common(f, *args, **kwargs): - # type: (Any, Any, Any) -> Any +def _new_embeddings_create_common(f: Any, *args: Any, **kwargs: Any) -> Any: integration = sentry_sdk.get_client().get_integration(OpenAIIntegration) if integration is None: return f(*args, **kwargs) @@ -501,10 +500,8 @@ def _new_embeddings_create_common(f, *args, **kwargs): return response -def _wrap_embeddings_create(f): - # type: (Any) -> Any - def _execute_sync(f, *args, **kwargs): - # type: (Any, Any, Any) -> Any +def _wrap_embeddings_create(f: Any) -> Any: + def _execute_sync(f: Any, *args: Any, **kwargs: Any) -> Any: gen = _new_embeddings_create_common(f, *args, **kwargs) try: @@ -524,8 +521,7 @@ def _execute_sync(f, *args, **kwargs): return e.value @wraps(f) - def _sentry_patched_create_sync(*args, **kwargs): - # type: (Any, Any) -> Any + def _sentry_patched_create_sync(*args: Any, **kwargs: Any) -> Any: integration = sentry_sdk.get_client().get_integration(OpenAIIntegration) if integration is None: return f(*args, **kwargs) @@ -535,10 +531,8 @@ def _sentry_patched_create_sync(*args, **kwargs): return 
_sentry_patched_create_sync -def _wrap_async_embeddings_create(f): - # type: (Any) -> Any - async def _execute_async(f, *args, **kwargs): - # type: (Any, Any, Any) -> Any +def _wrap_async_embeddings_create(f: Any) -> Any: + async def _execute_async(f: Any, *args: Any, **kwargs: Any) -> Any: gen = _new_embeddings_create_common(f, *args, **kwargs) try: @@ -558,8 +552,7 @@ async def _execute_async(f, *args, **kwargs): return e.value @wraps(f) - async def _sentry_patched_create_async(*args, **kwargs): - # type: (Any, Any) -> Any + async def _sentry_patched_create_async(*args: Any, **kwargs: Any) -> Any: integration = sentry_sdk.get_client().get_integration(OpenAIIntegration) if integration is None: return await f(*args, **kwargs) @@ -569,8 +562,7 @@ async def _sentry_patched_create_async(*args, **kwargs): return _sentry_patched_create_async -def _new_responses_create_common(f, *args, **kwargs): - # type: (Any, Any, Any) -> Any +def _new_responses_create_common(f: Any, *args: Any, **kwargs: Any) -> Any: integration = sentry_sdk.get_client().get_integration(OpenAIIntegration) if integration is None: return f(*args, **kwargs) @@ -594,10 +586,8 @@ def _new_responses_create_common(f, *args, **kwargs): return response -def _wrap_responses_create(f): - # type: (Any) -> Any - def _execute_sync(f, *args, **kwargs): - # type: (Any, Any, Any) -> Any +def _wrap_responses_create(f: Any) -> Any: + def _execute_sync(f: Any, *args: Any, **kwargs: Any) -> Any: gen = _new_responses_create_common(f, *args, **kwargs) try: @@ -617,8 +607,7 @@ def _execute_sync(f, *args, **kwargs): return e.value @wraps(f) - def _sentry_patched_create_sync(*args, **kwargs): - # type: (Any, Any) -> Any + def _sentry_patched_create_sync(*args: Any, **kwargs: Any) -> Any: integration = sentry_sdk.get_client().get_integration(OpenAIIntegration) if integration is None: return f(*args, **kwargs) @@ -628,10 +617,8 @@ def _sentry_patched_create_sync(*args, **kwargs): return _sentry_patched_create_sync -def _wrap_async_responses_create(f): - # type: (Any) -> Any - async def _execute_async(f, *args, **kwargs): - # type: (Any, Any, Any) -> Any +def _wrap_async_responses_create(f: Any) -> Any: + async def _execute_async(f: Any, *args: Any, **kwargs: Any) -> Any: gen = _new_responses_create_common(f, *args, **kwargs) try: @@ -651,8 +638,7 @@ async def _execute_async(f, *args, **kwargs): return e.value @wraps(f) - async def _sentry_patched_responses_async(*args, **kwargs): - # type: (Any, Any) -> Any + async def _sentry_patched_responses_async(*args: Any, **kwargs: Any) -> Any: integration = sentry_sdk.get_client().get_integration(OpenAIIntegration) if integration is None: return await f(*args, **kwargs) diff --git a/sentry_sdk/integrations/openai_agents/__init__.py b/sentry_sdk/integrations/openai_agents/__init__.py index 06b6459441..384cf9c651 100644 --- a/sentry_sdk/integrations/openai_agents/__init__.py +++ b/sentry_sdk/integrations/openai_agents/__init__.py @@ -14,8 +14,7 @@ raise DidNotEnable("OpenAI Agents not installed") -def _patch_runner(): - # type: () -> None +def _patch_runner() -> None: # Create the root span for one full agent run (including eventual handoffs) # Note agents.run.DEFAULT_AGENT_RUNNER.run_sync is a wrapper around # agents.run.DEFAULT_AGENT_RUNNER.run. It does not need to be wrapped separately. 
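Since `run_sync` delegates to `run`, patching `run` (together with the model and tool patches below) covers both entry points. A hedged usage sketch (placeholder DSN; the `Agent`/`Runner` calls follow the openai-agents API):

    import sentry_sdk
    from agents import Agent, Runner
    from sentry_sdk.integrations.openai_agents import OpenAIAgentsIntegration

    sentry_sdk.init(
        dsn="https://examplePublicKey@o0.ingest.sentry.io/0",
        integrations=[OpenAIAgentsIntegration()],
    )

    agent = Agent(name="Assistant", instructions="Reply concisely.")
    # Traced end to end: run_sync is a wrapper around the patched run.
    result = Runner.run_sync(agent, "Write a haiku about tracing.")
    print(result.final_output)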
@@ -28,15 +27,13 @@ def _patch_runner(): _patch_agent_run() -def _patch_model(): - # type: () -> None +def _patch_model() -> None: agents.run.AgentRunner._get_model = classmethod( _create_get_model_wrapper(agents.run.AgentRunner._get_model), ) -def _patch_tools(): - # type: () -> None +def _patch_tools() -> None: agents.run.AgentRunner._get_all_tools = classmethod( _create_get_all_tools_wrapper(agents.run.AgentRunner._get_all_tools), ) @@ -46,8 +43,7 @@ class OpenAIAgentsIntegration(Integration): identifier = "openai_agents" @staticmethod - def setup_once(): - # type: () -> None + def setup_once() -> None: _patch_tools() _patch_model() _patch_runner() diff --git a/sentry_sdk/integrations/openai_agents/patches/agent_run.py b/sentry_sdk/integrations/openai_agents/patches/agent_run.py index 084100878c..984be12367 100644 --- a/sentry_sdk/integrations/openai_agents/patches/agent_run.py +++ b/sentry_sdk/integrations/openai_agents/patches/agent_run.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from functools import wraps from sentry_sdk.integrations import DidNotEnable @@ -16,8 +18,7 @@ raise DidNotEnable("OpenAI Agents not installed") -def _patch_agent_run(): - # type: () -> None +def _patch_agent_run() -> None: """ Patches AgentRunner methods to create agent invocation spans. This directly patches the execution flow to track when agents start and stop. @@ -28,15 +29,19 @@ def _patch_agent_run(): original_execute_handoffs = agents._run_impl.RunImpl.execute_handoffs original_execute_final_output = agents._run_impl.RunImpl.execute_final_output - def _start_invoke_agent_span(context_wrapper, agent): - # type: (agents.RunContextWrapper, agents.Agent) -> None + def _start_invoke_agent_span( + context_wrapper: agents.RunContextWrapper, agent: agents.Agent + ) -> None: """Start an agent invocation span""" # Store the agent on the context wrapper so we can access it later context_wrapper._sentry_current_agent = agent invoke_agent_span(context_wrapper, agent) - def _end_invoke_agent_span(context_wrapper, agent, output=None): - # type: (agents.RunContextWrapper, agents.Agent, Optional[Any]) -> None + def _end_invoke_agent_span( + context_wrapper: agents.RunContextWrapper, + agent: agents.Agent, + output: Optional[Any] = None, + ) -> None: """End the agent invocation span""" # Clear the stored agent if hasattr(context_wrapper, "_sentry_current_agent"): @@ -44,13 +49,13 @@ def _end_invoke_agent_span(context_wrapper, agent, output=None): update_invoke_agent_span(context_wrapper, agent, output) - def _has_active_agent_span(context_wrapper): - # type: (agents.RunContextWrapper) -> bool + def _has_active_agent_span(context_wrapper: agents.RunContextWrapper) -> bool: """Check if there's an active agent span for this context""" return getattr(context_wrapper, "_sentry_current_agent", None) is not None - def _get_current_agent(context_wrapper): - # type: (agents.RunContextWrapper) -> Optional[agents.Agent] + def _get_current_agent( + context_wrapper: agents.RunContextWrapper, + ) -> Optional[agents.Agent]: """Get the current agent from context wrapper""" return getattr(context_wrapper, "_sentry_current_agent", None) @@ -59,10 +64,10 @@ def _get_current_agent(context_wrapper): if hasattr(original_run_single_turn, "__func__") else original_run_single_turn ) - async def patched_run_single_turn(cls, *args, **kwargs): - # type: (agents.Runner, *Any, **Any) -> Any + async def patched_run_single_turn( + cls: agents.Runner, *args: Any, **kwargs: Any + ) -> Any: """Patched _run_single_turn that creates agent 
invocation spans""" - agent = kwargs.get("agent") context_wrapper = kwargs.get("context_wrapper") should_run_agent_start_hooks = kwargs.get("should_run_agent_start_hooks") @@ -78,7 +83,11 @@ async def patched_run_single_turn(cls, *args, **kwargs): _start_invoke_agent_span(context_wrapper, agent) # Call original method with all the correct parameters - result = await original_run_single_turn(*args, **kwargs) + try: + result = await original_run_single_turn(*args, **kwargs) + finally: + if agent and context_wrapper and _has_active_agent_span(context_wrapper): + _end_invoke_agent_span(context_wrapper, agent) return result @@ -87,10 +96,10 @@ async def patched_run_single_turn(cls, *args, **kwargs): if hasattr(original_execute_handoffs, "__func__") else original_execute_handoffs ) - async def patched_execute_handoffs(cls, *args, **kwargs): - # type: (agents.Runner, *Any, **Any) -> Any + async def patched_execute_handoffs( + cls: agents.Runner, *args: Any, **kwargs: Any + ) -> Any: """Patched execute_handoffs that creates handoff spans and ends agent span for handoffs""" - context_wrapper = kwargs.get("context_wrapper") run_handoffs = kwargs.get("run_handoffs") agent = kwargs.get("agent") @@ -117,10 +126,10 @@ async def patched_execute_handoffs(cls, *args, **kwargs): if hasattr(original_execute_final_output, "__func__") else original_execute_final_output ) - async def patched_execute_final_output(cls, *args, **kwargs): - # type: (agents.Runner, *Any, **Any) -> Any + async def patched_execute_final_output( + cls: agents.Runner, *args: Any, **kwargs: Any + ) -> Any: """Patched execute_final_output that ends agent span for final outputs""" - agent = kwargs.get("agent") context_wrapper = kwargs.get("context_wrapper") final_output = kwargs.get("final_output") diff --git a/sentry_sdk/integrations/openai_agents/patches/models.py b/sentry_sdk/integrations/openai_agents/patches/models.py index e6f24da6a1..e1dbb58719 100644 --- a/sentry_sdk/integrations/openai_agents/patches/models.py +++ b/sentry_sdk/integrations/openai_agents/patches/models.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from functools import wraps from sentry_sdk.integrations import DidNotEnable @@ -16,8 +18,9 @@ raise DidNotEnable("OpenAI Agents not installed") -def _create_get_model_wrapper(original_get_model): - # type: (Callable[..., Any]) -> Callable[..., Any] +def _create_get_model_wrapper( + original_get_model: Callable[..., Any], +) -> Callable[..., Any]: """ Wraps the agents.Runner._get_model method to wrap the get_response method of the model to create a AI client span. 
""" @@ -27,15 +30,14 @@ def _create_get_model_wrapper(original_get_model): if hasattr(original_get_model, "__func__") else original_get_model ) - def wrapped_get_model(cls, agent, run_config): - # type: (agents.Runner, agents.Agent, agents.RunConfig) -> agents.Model - + def wrapped_get_model( + cls: agents.Runner, agent: agents.Agent, run_config: agents.RunConfig + ) -> agents.Model: model = original_get_model(agent, run_config) original_get_response = model.get_response @wraps(original_get_response) - async def wrapped_get_response(*args, **kwargs): - # type: (*Any, **Any) -> Any + async def wrapped_get_response(*args: Any, **kwargs: Any) -> Any: with ai_client_span(agent, kwargs) as span: result = await original_get_response(*args, **kwargs) diff --git a/sentry_sdk/integrations/openai_agents/patches/runner.py b/sentry_sdk/integrations/openai_agents/patches/runner.py index e1e9a3b50c..22d5c4afff 100644 --- a/sentry_sdk/integrations/openai_agents/patches/runner.py +++ b/sentry_sdk/integrations/openai_agents/patches/runner.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from functools import wraps import sentry_sdk @@ -11,8 +13,7 @@ from typing import Any, Callable -def _create_run_wrapper(original_func): - # type: (Callable[..., Any]) -> Callable[..., Any] +def _create_run_wrapper(original_func: Callable[..., Any]) -> Callable[..., Any]: """ Wraps the agents.Runner.run methods to create a root span for the agent workflow runs. @@ -21,8 +22,7 @@ def _create_run_wrapper(original_func): """ @wraps(original_func) - async def wrapper(*args, **kwargs): - # type: (*Any, **Any) -> Any + async def wrapper(*args: Any, **kwargs: Any) -> Any: agent = args[0] with agent_workflow_span(agent): result = None @@ -33,9 +33,9 @@ async def wrapper(*args, **kwargs): _capture_exception(exc) # It could be that there is a "invoke agent" span still open - current_span = sentry_sdk.get_current_span() - if current_span is not None and current_span.timestamp is None: - current_span.__exit__(None, None, None) + span = sentry_sdk.get_current_span() + if span is not None and span.timestamp is None: + span.__exit__(None, None, None) raise exc from None diff --git a/sentry_sdk/integrations/openai_agents/patches/tools.py b/sentry_sdk/integrations/openai_agents/patches/tools.py index b359d32678..ba75cd0eb8 100644 --- a/sentry_sdk/integrations/openai_agents/patches/tools.py +++ b/sentry_sdk/integrations/openai_agents/patches/tools.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from functools import wraps from sentry_sdk.integrations import DidNotEnable @@ -15,8 +17,9 @@ raise DidNotEnable("OpenAI Agents not installed") -def _create_get_all_tools_wrapper(original_get_all_tools): - # type: (Callable[..., Any]) -> Callable[..., Any] +def _create_get_all_tools_wrapper( + original_get_all_tools: Callable[..., Any], +) -> Callable[..., Any]: """ Wraps the agents.Runner._get_all_tools method of the Runner class to wrap all function tools with Sentry instrumentation. 
""" @@ -26,9 +29,11 @@ def _create_get_all_tools_wrapper(original_get_all_tools): if hasattr(original_get_all_tools, "__func__") else original_get_all_tools ) - async def wrapped_get_all_tools(cls, agent, context_wrapper): - # type: (agents.Runner, agents.Agent, agents.RunContextWrapper) -> list[agents.Tool] - + async def wrapped_get_all_tools( + cls: agents.Runner, + agent: agents.Agent, + context_wrapper: agents.RunContextWrapper, + ) -> list[agents.Tool]: # Get the original tools tools = await original_get_all_tools(agent, context_wrapper) @@ -42,11 +47,13 @@ async def wrapped_get_all_tools(cls, agent, context_wrapper): # Create a new FunctionTool with our wrapped invoke method original_on_invoke = tool.on_invoke_tool - def create_wrapped_invoke(current_tool, current_on_invoke): - # type: (agents.Tool, Callable[..., Any]) -> Callable[..., Any] + def create_wrapped_invoke( + current_tool: agents.Tool, current_on_invoke: Callable[..., Any] + ) -> Callable[..., Any]: @wraps(current_on_invoke) - async def sentry_wrapped_on_invoke_tool(*args, **kwargs): - # type: (*Any, **Any) -> Any + async def sentry_wrapped_on_invoke_tool( + *args: Any, **kwargs: Any + ) -> Any: with execute_tool_span(current_tool, *args, **kwargs) as span: # We can not capture exceptions in tool execution here because # `_on_invoke_tool` is swallowing the exception here: diff --git a/sentry_sdk/integrations/openai_agents/spans/agent_workflow.py b/sentry_sdk/integrations/openai_agents/spans/agent_workflow.py index de2f28d41e..4d12fff782 100644 --- a/sentry_sdk/integrations/openai_agents/spans/agent_workflow.py +++ b/sentry_sdk/integrations/openai_agents/spans/agent_workflow.py @@ -1,7 +1,8 @@ +from __future__ import annotations + import sentry_sdk from ..consts import SPAN_ORIGIN -from ..utils import _get_start_span_function from typing import TYPE_CHECKING @@ -9,11 +10,9 @@ import agents -def agent_workflow_span(agent): - # type: (agents.Agent) -> sentry_sdk.tracing.Span - +def agent_workflow_span(agent: agents.Agent) -> sentry_sdk.tracing.Span: # Create a transaction or a span if an transaction is already active - span = _get_start_span_function()( + span = sentry_sdk.start_span( name=f"{agent.name} workflow", origin=SPAN_ORIGIN, ) diff --git a/sentry_sdk/integrations/openai_agents/spans/ai_client.py b/sentry_sdk/integrations/openai_agents/spans/ai_client.py index d325ae86e3..a67d3add5c 100644 --- a/sentry_sdk/integrations/openai_agents/spans/ai_client.py +++ b/sentry_sdk/integrations/openai_agents/spans/ai_client.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import sentry_sdk from sentry_sdk.consts import OP, SPANDATA @@ -16,23 +18,28 @@ from typing import Any -def ai_client_span(agent, get_response_kwargs): - # type: (Agent, dict[str, Any]) -> sentry_sdk.tracing.Span - # TODO-anton: implement other types of operations. Now "chat" is hardcoded. +def ai_client_span( + agent: Agent, get_response_kwargs: dict[str, Any] +) -> sentry_sdk.tracing.Span: model_name = agent.model.model if hasattr(agent.model, "model") else agent.model + # TODO-anton: implement other types of operations. Now "chat" is hardcoded. 
span = sentry_sdk.start_span( op=OP.GEN_AI_CHAT, description=f"chat {model_name}", origin=SPAN_ORIGIN, ) # TODO-anton: remove hardcoded stuff and replace something that also works for embedding and so on - span.set_data(SPANDATA.GEN_AI_OPERATION_NAME, "chat") + span.set_attribute(SPANDATA.GEN_AI_OPERATION_NAME, "chat") return span -def update_ai_client_span(span, agent, get_response_kwargs, result): - # type: (sentry_sdk.tracing.Span, Agent, dict[str, Any], Any) -> None +def update_ai_client_span( + span: sentry_sdk.tracing.Span, + agent: Agent, + get_response_kwargs: dict[str, Any], + result: Any, +) -> None: _set_agent_data(span, agent) _set_usage_data(span, result.usage) _set_input_data(span, get_response_kwargs) diff --git a/sentry_sdk/integrations/openai_agents/spans/execute_tool.py b/sentry_sdk/integrations/openai_agents/spans/execute_tool.py index 5f9e4cb340..af130a137e 100644 --- a/sentry_sdk/integrations/openai_agents/spans/execute_tool.py +++ b/sentry_sdk/integrations/openai_agents/spans/execute_tool.py @@ -1,5 +1,7 @@ +from __future__ import annotations + import sentry_sdk -from sentry_sdk.consts import OP, SPANDATA, SPANSTATUS +from sentry_sdk.consts import OP, SPANDATA from sentry_sdk.scope import should_send_default_pii from ..consts import SPAN_ORIGIN @@ -12,37 +14,34 @@ from typing import Any -def execute_tool_span(tool, *args, **kwargs): - # type: (agents.Tool, *Any, **Any) -> sentry_sdk.tracing.Span +def execute_tool_span( + tool: agents.Tool, *args: Any, **kwargs: Any +) -> sentry_sdk.tracing.Span: span = sentry_sdk.start_span( op=OP.GEN_AI_EXECUTE_TOOL, name=f"execute_tool {tool.name}", origin=SPAN_ORIGIN, ) - span.set_data(SPANDATA.GEN_AI_OPERATION_NAME, "execute_tool") + span.set_attribute(SPANDATA.GEN_AI_OPERATION_NAME, "execute_tool") if tool.__class__.__name__ == "FunctionTool": - span.set_data(SPANDATA.GEN_AI_TOOL_TYPE, "function") + span.set_attribute(SPANDATA.GEN_AI_TOOL_TYPE, "function") - span.set_data(SPANDATA.GEN_AI_TOOL_NAME, tool.name) - span.set_data(SPANDATA.GEN_AI_TOOL_DESCRIPTION, tool.description) + span.set_attribute(SPANDATA.GEN_AI_TOOL_NAME, tool.name) + span.set_attribute(SPANDATA.GEN_AI_TOOL_DESCRIPTION, tool.description) if should_send_default_pii(): input = args[1] - span.set_data(SPANDATA.GEN_AI_TOOL_INPUT, input) + span.set_attribute(SPANDATA.GEN_AI_TOOL_INPUT, input) return span -def update_execute_tool_span(span, agent, tool, result): - # type: (sentry_sdk.tracing.Span, agents.Agent, agents.Tool, Any) -> None +def update_execute_tool_span( + span: sentry_sdk.tracing.Span, agent: agents.Agent, tool: agents.Tool, result: Any +) -> None: _set_agent_data(span, agent) - if isinstance(result, str) and result.startswith( - "An error occurred while running the tool" - ): - span.set_status(SPANSTATUS.INTERNAL_ERROR) - if should_send_default_pii(): - span.set_data(SPANDATA.GEN_AI_TOOL_OUTPUT, result) + span.set_attribute(SPANDATA.GEN_AI_TOOL_OUTPUT, result) diff --git a/sentry_sdk/integrations/openai_agents/spans/handoff.py b/sentry_sdk/integrations/openai_agents/spans/handoff.py index 78e6788c7d..daa6579537 100644 --- a/sentry_sdk/integrations/openai_agents/spans/handoff.py +++ b/sentry_sdk/integrations/openai_agents/spans/handoff.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import sentry_sdk from sentry_sdk.consts import OP, SPANDATA @@ -9,11 +11,14 @@ import agents -def handoff_span(context, from_agent, to_agent_name): - # type: (agents.RunContextWrapper, agents.Agent, str) -> None +def handoff_span( + context_wrapper: 
agents.RunContextWrapper, + from_agent: agents.Agent, + to_agent_name: str, +) -> None: with sentry_sdk.start_span( op=OP.GEN_AI_HANDOFF, name=f"handoff from {from_agent.name} to {to_agent_name}", origin=SPAN_ORIGIN, ) as span: - span.set_data(SPANDATA.GEN_AI_OPERATION_NAME, "handoff") + span.set_attribute(SPANDATA.GEN_AI_OPERATION_NAME, "handoff") diff --git a/sentry_sdk/integrations/openai_agents/spans/invoke_agent.py b/sentry_sdk/integrations/openai_agents/spans/invoke_agent.py index 549ade1246..b4f9043616 100644 --- a/sentry_sdk/integrations/openai_agents/spans/invoke_agent.py +++ b/sentry_sdk/integrations/openai_agents/spans/invoke_agent.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import sentry_sdk from sentry_sdk.consts import OP, SPANDATA @@ -11,8 +13,9 @@ from typing import Any -def invoke_agent_span(context, agent): - # type: (agents.RunContextWrapper, agents.Agent) -> sentry_sdk.tracing.Span +def invoke_agent_span( + context_wrapper: agents.RunContextWrapper, agent: agents.Agent +) -> sentry_sdk.tracing.Span: span = sentry_sdk.start_span( op=OP.GEN_AI_INVOKE_AGENT, name=f"invoke_agent {agent.name}", @@ -20,15 +23,19 @@ def invoke_agent_span(context, agent): ) span.__enter__() - span.set_data(SPANDATA.GEN_AI_OPERATION_NAME, "invoke_agent") + span.set_attribute(SPANDATA.GEN_AI_OPERATION_NAME, "invoke_agent") _set_agent_data(span, agent) + context_wrapper._sentry_invoke_agent_span = span + return span -def update_invoke_agent_span(context, agent, output): - # type: (agents.RunContextWrapper, agents.Agent, Any) -> None - current_span = sentry_sdk.get_current_span() - if current_span: - current_span.__exit__(None, None, None) +def update_invoke_agent_span( + context_wrapper: agents.RunContextWrapper, agent: agents.Agent, output: Any +) -> None: + span = getattr(context_wrapper, "_sentry_invoke_agent_span", None) + if span is not None: + span.__exit__(None, None, None) + del context_wrapper._sentry_invoke_agent_span diff --git a/sentry_sdk/integrations/openai_agents/utils.py b/sentry_sdk/integrations/openai_agents/utils.py index 1525346726..c4e456e9c0 100644 --- a/sentry_sdk/integrations/openai_agents/utils.py +++ b/sentry_sdk/integrations/openai_agents/utils.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import sentry_sdk from sentry_sdk.consts import SPANDATA from sentry_sdk.integrations import DidNotEnable @@ -8,7 +10,6 @@ if TYPE_CHECKING: from typing import Any - from typing import Callable from agents import Usage try: @@ -18,8 +19,7 @@ raise DidNotEnable("OpenAI Agents not installed") -def _capture_exception(exc): - # type: (Any) -> None +def _capture_exception(exc: Any) -> None: event, hint = event_from_exception( exc, client_options=sentry_sdk.get_client().options, @@ -28,85 +28,75 @@ def _capture_exception(exc): sentry_sdk.capture_event(event, hint=hint) -def _get_start_span_function(): - # type: () -> Callable[..., Any] - current_span = sentry_sdk.get_current_span() - transaction_exists = ( - current_span is not None and current_span.containing_transaction == current_span - ) - return sentry_sdk.start_span if transaction_exists else sentry_sdk.start_transaction - - -def _set_agent_data(span, agent): - # type: (sentry_sdk.tracing.Span, agents.Agent) -> None - span.set_data( +def _set_agent_data(span: sentry_sdk.tracing.Span, agent: agents.Agent) -> None: + span.set_attribute( SPANDATA.GEN_AI_SYSTEM, "openai" ) # See footnote for https://opentelemetry.io/docs/specs/semconv/registry/attributes/gen-ai/#gen-ai-system for explanation why. 
- span.set_data(SPANDATA.GEN_AI_AGENT_NAME, agent.name) + span.set_attribute(SPANDATA.GEN_AI_AGENT_NAME, agent.name) if agent.model_settings.max_tokens: - span.set_data( + span.set_attribute( SPANDATA.GEN_AI_REQUEST_MAX_TOKENS, agent.model_settings.max_tokens ) if agent.model: model_name = agent.model.model if hasattr(agent.model, "model") else agent.model - span.set_data(SPANDATA.GEN_AI_REQUEST_MODEL, model_name) + span.set_attribute(SPANDATA.GEN_AI_REQUEST_MODEL, model_name) if agent.model_settings.presence_penalty: - span.set_data( + span.set_attribute( SPANDATA.GEN_AI_REQUEST_PRESENCE_PENALTY, agent.model_settings.presence_penalty, ) if agent.model_settings.temperature: - span.set_data( + span.set_attribute( SPANDATA.GEN_AI_REQUEST_TEMPERATURE, agent.model_settings.temperature ) if agent.model_settings.top_p: - span.set_data(SPANDATA.GEN_AI_REQUEST_TOP_P, agent.model_settings.top_p) + span.set_attribute(SPANDATA.GEN_AI_REQUEST_TOP_P, agent.model_settings.top_p) if agent.model_settings.frequency_penalty: - span.set_data( + span.set_attribute( SPANDATA.GEN_AI_REQUEST_FREQUENCY_PENALTY, agent.model_settings.frequency_penalty, ) if len(agent.tools) > 0: - span.set_data( + span.set_attribute( SPANDATA.GEN_AI_REQUEST_AVAILABLE_TOOLS, safe_serialize([vars(tool) for tool in agent.tools]), ) -def _set_usage_data(span, usage): - # type: (sentry_sdk.tracing.Span, Usage) -> None - span.set_data(SPANDATA.GEN_AI_USAGE_INPUT_TOKENS, usage.input_tokens) - span.set_data( +def _set_usage_data(span: sentry_sdk.tracing.Span, usage: Usage) -> None: + span.set_attribute(SPANDATA.GEN_AI_USAGE_INPUT_TOKENS, usage.input_tokens) + span.set_attribute( SPANDATA.GEN_AI_USAGE_INPUT_TOKENS_CACHED, usage.input_tokens_details.cached_tokens, ) - span.set_data(SPANDATA.GEN_AI_USAGE_OUTPUT_TOKENS, usage.output_tokens) - span.set_data( + span.set_attribute(SPANDATA.GEN_AI_USAGE_OUTPUT_TOKENS, usage.output_tokens) + span.set_attribute( SPANDATA.GEN_AI_USAGE_OUTPUT_TOKENS_REASONING, usage.output_tokens_details.reasoning_tokens, ) - span.set_data(SPANDATA.GEN_AI_USAGE_TOTAL_TOKENS, usage.total_tokens) + span.set_attribute(SPANDATA.GEN_AI_USAGE_TOTAL_TOKENS, usage.total_tokens) -def _set_input_data(span, get_response_kwargs): - # type: (sentry_sdk.tracing.Span, dict[str, Any]) -> None +def _set_input_data( + span: sentry_sdk.tracing.Span, get_response_kwargs: dict[str, Any] +) -> None: if not should_send_default_pii(): return - messages_by_role = { + messages_by_role: dict[str, list[Any]] = { "system": [], "user": [], "assistant": [], "tool": [], - } # type: (dict[str, list[Any]]) + } system_instructions = get_response_kwargs.get("system_instructions") if system_instructions: messages_by_role["system"].append({"type": "text", "text": system_instructions}) @@ -127,22 +117,23 @@ def _set_input_data(span, get_response_kwargs): if len(messages) > 0: request_messages.append({"role": role, "content": messages}) - span.set_data(SPANDATA.GEN_AI_REQUEST_MESSAGES, safe_serialize(request_messages)) + span.set_attribute( + SPANDATA.GEN_AI_REQUEST_MESSAGES, safe_serialize(request_messages) + ) -def _set_output_data(span, result): - # type: (sentry_sdk.tracing.Span, Any) -> None +def _set_output_data(span: sentry_sdk.tracing.Span, result: Any) -> None: if not should_send_default_pii(): return - output_messages = { + output_messages: dict[str, list[Any]] = { "response": [], "tool": [], - } # type: (dict[str, list[Any]]) + } for output in result.output: if output.type == "function_call": - output_messages["tool"].append(output.dict()) + 
output_messages["tool"].append(output.model_dump()) elif output.type == "message": for output_message in output.content: try: @@ -152,11 +143,11 @@ def _set_output_data(span, result): output_messages["response"].append(output_message.dict()) if len(output_messages["tool"]) > 0: - span.set_data( + span.set_attribute( SPANDATA.GEN_AI_RESPONSE_TOOL_CALLS, safe_serialize(output_messages["tool"]) ) if len(output_messages["response"]) > 0: - span.set_data( + span.set_attribute( SPANDATA.GEN_AI_RESPONSE_TEXT, safe_serialize(output_messages["response"]) ) diff --git a/sentry_sdk/integrations/openfeature.py b/sentry_sdk/integrations/openfeature.py index 3ac73edd93..a525443f6a 100644 --- a/sentry_sdk/integrations/openfeature.py +++ b/sentry_sdk/integrations/openfeature.py @@ -1,3 +1,4 @@ +from __future__ import annotations from typing import TYPE_CHECKING, Any from sentry_sdk.feature_flags import add_feature_flag @@ -17,19 +18,24 @@ class OpenFeatureIntegration(Integration): identifier = "openfeature" @staticmethod - def setup_once(): - # type: () -> None + def setup_once() -> None: # Register the hook within the global openfeature hooks list. api.add_hooks(hooks=[OpenFeatureHook()]) class OpenFeatureHook(Hook): - def after(self, hook_context, details, hints): - # type: (Any, Any, Any) -> None + + def after( + self, + hook_context: Any, + details: Any, + hints: Any, + ) -> None: if isinstance(details.value, bool): add_feature_flag(details.flag_key, details.value) - def error(self, hook_context, exception, hints): - # type: (HookContext, Exception, HookHints) -> None + def error( + self, hook_context: HookContext, exception: Exception, hints: HookHints + ) -> None: if isinstance(hook_context.default_value, bool): add_feature_flag(hook_context.flag_key, hook_context.default_value) diff --git a/sentry_sdk/integrations/opentelemetry/__init__.py b/sentry_sdk/integrations/opentelemetry/__init__.py deleted file mode 100644 index 3c4c1a683d..0000000000 --- a/sentry_sdk/integrations/opentelemetry/__init__.py +++ /dev/null @@ -1,7 +0,0 @@ -from sentry_sdk.integrations.opentelemetry.span_processor import SentrySpanProcessor -from sentry_sdk.integrations.opentelemetry.propagator import SentryPropagator - -__all__ = [ - "SentryPropagator", - "SentrySpanProcessor", -] diff --git a/sentry_sdk/integrations/opentelemetry/consts.py b/sentry_sdk/integrations/opentelemetry/consts.py deleted file mode 100644 index ec493449d3..0000000000 --- a/sentry_sdk/integrations/opentelemetry/consts.py +++ /dev/null @@ -1,5 +0,0 @@ -from opentelemetry.context import create_key - - -SENTRY_TRACE_KEY = create_key("sentry-trace") -SENTRY_BAGGAGE_KEY = create_key("sentry-baggage") diff --git a/sentry_sdk/integrations/opentelemetry/integration.py b/sentry_sdk/integrations/opentelemetry/integration.py deleted file mode 100644 index 43e0396c16..0000000000 --- a/sentry_sdk/integrations/opentelemetry/integration.py +++ /dev/null @@ -1,58 +0,0 @@ -""" -IMPORTANT: The contents of this file are part of a proof of concept and as such -are experimental and not suitable for production use. They may be changed or -removed at any time without prior notice. 
-""" - -from sentry_sdk.integrations import DidNotEnable, Integration -from sentry_sdk.integrations.opentelemetry.propagator import SentryPropagator -from sentry_sdk.integrations.opentelemetry.span_processor import SentrySpanProcessor -from sentry_sdk.utils import logger - -try: - from opentelemetry import trace - from opentelemetry.propagate import set_global_textmap - from opentelemetry.sdk.trace import TracerProvider -except ImportError: - raise DidNotEnable("opentelemetry not installed") - -try: - from opentelemetry.instrumentation.django import DjangoInstrumentor # type: ignore[import-not-found] -except ImportError: - DjangoInstrumentor = None - - -CONFIGURABLE_INSTRUMENTATIONS = { - DjangoInstrumentor: {"is_sql_commentor_enabled": True}, -} - - -class OpenTelemetryIntegration(Integration): - identifier = "opentelemetry" - - @staticmethod - def setup_once(): - # type: () -> None - logger.warning( - "[OTel] Initializing highly experimental OpenTelemetry support. " - "Use at your own risk." - ) - - _setup_sentry_tracing() - # _setup_instrumentors() - - logger.debug("[OTel] Finished setting up OpenTelemetry integration") - - -def _setup_sentry_tracing(): - # type: () -> None - provider = TracerProvider() - provider.add_span_processor(SentrySpanProcessor()) - trace.set_tracer_provider(provider) - set_global_textmap(SentryPropagator()) - - -def _setup_instrumentors(): - # type: () -> None - for instrumentor, kwargs in CONFIGURABLE_INSTRUMENTATIONS.items(): - instrumentor().instrument(**kwargs) diff --git a/sentry_sdk/integrations/opentelemetry/span_processor.py b/sentry_sdk/integrations/opentelemetry/span_processor.py deleted file mode 100644 index e00562a509..0000000000 --- a/sentry_sdk/integrations/opentelemetry/span_processor.py +++ /dev/null @@ -1,391 +0,0 @@ -from datetime import datetime, timezone -from time import time -from typing import TYPE_CHECKING, cast - -from opentelemetry.context import get_value -from opentelemetry.sdk.trace import SpanProcessor, ReadableSpan as OTelSpan -from opentelemetry.semconv.trace import SpanAttributes -from opentelemetry.trace import ( - format_span_id, - format_trace_id, - get_current_span, - SpanKind, -) -from opentelemetry.trace.span import ( - INVALID_SPAN_ID, - INVALID_TRACE_ID, -) -from sentry_sdk import get_client, start_transaction -from sentry_sdk.consts import INSTRUMENTER, SPANSTATUS -from sentry_sdk.integrations.opentelemetry.consts import ( - SENTRY_BAGGAGE_KEY, - SENTRY_TRACE_KEY, -) -from sentry_sdk.scope import add_global_event_processor -from sentry_sdk.tracing import Transaction, Span as SentrySpan -from sentry_sdk.utils import Dsn - -from urllib3.util import parse_url as urlparse - -if TYPE_CHECKING: - from typing import Any, Optional, Union - from opentelemetry import context as context_api - from sentry_sdk._types import Event, Hint - -OPEN_TELEMETRY_CONTEXT = "otel" -SPAN_MAX_TIME_OPEN_MINUTES = 10 -SPAN_ORIGIN = "auto.otel" - - -def link_trace_context_to_error_event(event, otel_span_map): - # type: (Event, dict[str, Union[Transaction, SentrySpan]]) -> Event - client = get_client() - - if client.options["instrumenter"] != INSTRUMENTER.OTEL: - return event - - if hasattr(event, "type") and event["type"] == "transaction": - return event - - otel_span = get_current_span() - if not otel_span: - return event - - ctx = otel_span.get_span_context() - - if ctx.trace_id == INVALID_TRACE_ID or ctx.span_id == INVALID_SPAN_ID: - return event - - sentry_span = otel_span_map.get(format_span_id(ctx.span_id), None) - if not sentry_span: - 
return event - - contexts = event.setdefault("contexts", {}) - contexts.setdefault("trace", {}).update(sentry_span.get_trace_context()) - - return event - - -class SentrySpanProcessor(SpanProcessor): - """ - Converts OTel spans into Sentry spans so they can be sent to the Sentry backend. - """ - - # The mapping from otel span ids to sentry spans - otel_span_map = {} # type: dict[str, Union[Transaction, SentrySpan]] - - # The currently open spans. Elements will be discarded after SPAN_MAX_TIME_OPEN_MINUTES - open_spans = {} # type: dict[int, set[str]] - - def __new__(cls): - # type: () -> SentrySpanProcessor - if not hasattr(cls, "instance"): - cls.instance = super().__new__(cls) - - return cls.instance - - def __init__(self): - # type: () -> None - @add_global_event_processor - def global_event_processor(event, hint): - # type: (Event, Hint) -> Event - return link_trace_context_to_error_event(event, self.otel_span_map) - - def _prune_old_spans(self): - # type: (SentrySpanProcessor) -> None - """ - Prune spans that have been open for too long. - """ - current_time_minutes = int(time() / 60) - for span_start_minutes in list( - self.open_spans.keys() - ): # making a list because we change the dict - # prune empty open spans buckets - if self.open_spans[span_start_minutes] == set(): - self.open_spans.pop(span_start_minutes) - - # prune old buckets - elif current_time_minutes - span_start_minutes > SPAN_MAX_TIME_OPEN_MINUTES: - for span_id in self.open_spans.pop(span_start_minutes): - self.otel_span_map.pop(span_id, None) - - def on_start(self, otel_span, parent_context=None): - # type: (OTelSpan, Optional[context_api.Context]) -> None - client = get_client() - - if not client.dsn: - return - - try: - _ = Dsn(client.dsn) - except Exception: - return - - if client.options["instrumenter"] != INSTRUMENTER.OTEL: - return - - if not otel_span.get_span_context().is_valid: - return - - if self._is_sentry_span(otel_span): - return - - trace_data = self._get_trace_data(otel_span, parent_context) - - parent_span_id = trace_data["parent_span_id"] - sentry_parent_span = ( - self.otel_span_map.get(parent_span_id) if parent_span_id else None - ) - - start_timestamp = None - if otel_span.start_time is not None: - start_timestamp = datetime.fromtimestamp( - otel_span.start_time / 1e9, timezone.utc - ) # OTel spans have nanosecond precision - - sentry_span = None - if sentry_parent_span: - sentry_span = sentry_parent_span.start_child( - span_id=trace_data["span_id"], - name=otel_span.name, - start_timestamp=start_timestamp, - instrumenter=INSTRUMENTER.OTEL, - origin=SPAN_ORIGIN, - ) - else: - sentry_span = start_transaction( - name=otel_span.name, - span_id=trace_data["span_id"], - parent_span_id=parent_span_id, - trace_id=trace_data["trace_id"], - baggage=trace_data["baggage"], - start_timestamp=start_timestamp, - instrumenter=INSTRUMENTER.OTEL, - origin=SPAN_ORIGIN, - ) - - self.otel_span_map[trace_data["span_id"]] = sentry_span - - if otel_span.start_time is not None: - span_start_in_minutes = int( - otel_span.start_time / 1e9 / 60 - ) # OTel spans have nanosecond precision - self.open_spans.setdefault(span_start_in_minutes, set()).add( - trace_data["span_id"] - ) - - self._prune_old_spans() - - def on_end(self, otel_span): - # type: (OTelSpan) -> None - client = get_client() - - if client.options["instrumenter"] != INSTRUMENTER.OTEL: - return - - span_context = otel_span.get_span_context() - if not span_context.is_valid: - return - - span_id = format_span_id(span_context.span_id) - sentry_span = 
self.otel_span_map.pop(span_id, None) - if not sentry_span: - return - - sentry_span.op = otel_span.name - - self._update_span_with_otel_status(sentry_span, otel_span) - - if isinstance(sentry_span, Transaction): - sentry_span.name = otel_span.name - sentry_span.set_context( - OPEN_TELEMETRY_CONTEXT, self._get_otel_context(otel_span) - ) - self._update_transaction_with_otel_data(sentry_span, otel_span) - - else: - self._update_span_with_otel_data(sentry_span, otel_span) - - end_timestamp = None - if otel_span.end_time is not None: - end_timestamp = datetime.fromtimestamp( - otel_span.end_time / 1e9, timezone.utc - ) # OTel spans have nanosecond precision - - sentry_span.finish(end_timestamp=end_timestamp) - - if otel_span.start_time is not None: - span_start_in_minutes = int( - otel_span.start_time / 1e9 / 60 - ) # OTel spans have nanosecond precision - self.open_spans.setdefault(span_start_in_minutes, set()).discard(span_id) - - self._prune_old_spans() - - def _is_sentry_span(self, otel_span): - # type: (OTelSpan) -> bool - """ - Break infinite loop: - HTTP requests to Sentry are caught by OTel and send again to Sentry. - """ - otel_span_url = None - if otel_span.attributes is not None: - otel_span_url = otel_span.attributes.get(SpanAttributes.HTTP_URL) - otel_span_url = cast("Optional[str]", otel_span_url) - - dsn_url = None - client = get_client() - if client.dsn: - dsn_url = Dsn(client.dsn).netloc - - if otel_span_url and dsn_url and dsn_url in otel_span_url: - return True - - return False - - def _get_otel_context(self, otel_span): - # type: (OTelSpan) -> dict[str, Any] - """ - Returns the OTel context for Sentry. - See: https://develop.sentry.dev/sdk/performance/opentelemetry/#step-5-add-opentelemetry-context - """ - ctx = {} - - if otel_span.attributes: - ctx["attributes"] = dict(otel_span.attributes) - - if otel_span.resource.attributes: - ctx["resource"] = dict(otel_span.resource.attributes) - - return ctx - - def _get_trace_data(self, otel_span, parent_context): - # type: (OTelSpan, Optional[context_api.Context]) -> dict[str, Any] - """ - Extracts tracing information from one OTel span and its parent OTel context. - """ - trace_data = {} # type: dict[str, Any] - span_context = otel_span.get_span_context() - - span_id = format_span_id(span_context.span_id) - trace_data["span_id"] = span_id - - trace_id = format_trace_id(span_context.trace_id) - trace_data["trace_id"] = trace_id - - parent_span_id = ( - format_span_id(otel_span.parent.span_id) if otel_span.parent else None - ) - trace_data["parent_span_id"] = parent_span_id - - sentry_trace_data = get_value(SENTRY_TRACE_KEY, parent_context) - sentry_trace_data = cast("dict[str, Union[str, bool, None]]", sentry_trace_data) - trace_data["parent_sampled"] = ( - sentry_trace_data["parent_sampled"] if sentry_trace_data else None - ) - - baggage = get_value(SENTRY_BAGGAGE_KEY, parent_context) - trace_data["baggage"] = baggage - - return trace_data - - def _update_span_with_otel_status(self, sentry_span, otel_span): - # type: (SentrySpan, OTelSpan) -> None - """ - Set the Sentry span status from the OTel span - """ - if otel_span.status.is_unset: - return - - if otel_span.status.is_ok: - sentry_span.set_status(SPANSTATUS.OK) - return - - sentry_span.set_status(SPANSTATUS.INTERNAL_ERROR) - - def _update_span_with_otel_data(self, sentry_span, otel_span): - # type: (SentrySpan, OTelSpan) -> None - """ - Convert OTel span data and update the Sentry span with it. - This should eventually happen on the server when ingesting the spans. 
- """ - sentry_span.set_data("otel.kind", otel_span.kind) - - op = otel_span.name - description = otel_span.name - - if otel_span.attributes is not None: - for key, val in otel_span.attributes.items(): - sentry_span.set_data(key, val) - - http_method = otel_span.attributes.get(SpanAttributes.HTTP_METHOD) - http_method = cast("Optional[str]", http_method) - - db_query = otel_span.attributes.get(SpanAttributes.DB_SYSTEM) - - if http_method: - op = "http" - - if otel_span.kind == SpanKind.SERVER: - op += ".server" - elif otel_span.kind == SpanKind.CLIENT: - op += ".client" - - description = http_method - - peer_name = otel_span.attributes.get(SpanAttributes.NET_PEER_NAME, None) - if peer_name: - description += " {}".format(peer_name) - - target = otel_span.attributes.get(SpanAttributes.HTTP_TARGET, None) - if target: - description += " {}".format(target) - - if not peer_name and not target: - url = otel_span.attributes.get(SpanAttributes.HTTP_URL, None) - url = cast("Optional[str]", url) - if url: - parsed_url = urlparse(url) - url = "{}://{}{}".format( - parsed_url.scheme, parsed_url.netloc, parsed_url.path - ) - description += " {}".format(url) - - status_code = otel_span.attributes.get( - SpanAttributes.HTTP_STATUS_CODE, None - ) - status_code = cast("Optional[int]", status_code) - if status_code: - sentry_span.set_http_status(status_code) - - elif db_query: - op = "db" - statement = otel_span.attributes.get(SpanAttributes.DB_STATEMENT, None) - statement = cast("Optional[str]", statement) - if statement: - description = statement - - sentry_span.op = op - sentry_span.description = description - - def _update_transaction_with_otel_data(self, sentry_span, otel_span): - # type: (SentrySpan, OTelSpan) -> None - if otel_span.attributes is None: - return - - http_method = otel_span.attributes.get(SpanAttributes.HTTP_METHOD) - - if http_method: - status_code = otel_span.attributes.get(SpanAttributes.HTTP_STATUS_CODE) - status_code = cast("Optional[int]", status_code) - if status_code: - sentry_span.set_http_status(status_code) - - op = "http" - - if otel_span.kind == SpanKind.SERVER: - op += ".server" - elif otel_span.kind == SpanKind.CLIENT: - op += ".client" - - sentry_span.op = op diff --git a/sentry_sdk/integrations/pure_eval.py b/sentry_sdk/integrations/pure_eval.py index c1c3d63871..74cfa5a7c6 100644 --- a/sentry_sdk/integrations/pure_eval.py +++ b/sentry_sdk/integrations/pure_eval.py @@ -1,3 +1,4 @@ +from __future__ import annotations import ast import sentry_sdk @@ -35,12 +36,10 @@ class PureEvalIntegration(Integration): identifier = "pure_eval" @staticmethod - def setup_once(): - # type: () -> None + def setup_once() -> None: @add_global_event_processor - def add_executing_info(event, hint): - # type: (Event, Optional[Hint]) -> Optional[Event] + def add_executing_info(event: Event, hint: Optional[Hint]) -> Optional[Event]: if sentry_sdk.get_client().get_integration(PureEvalIntegration) is None: return event @@ -81,8 +80,7 @@ def add_executing_info(event, hint): return event -def pure_eval_frame(frame): - # type: (FrameType) -> Dict[str, Any] +def pure_eval_frame(frame: FrameType) -> Dict[str, Any]: source = executing.Source.for_frame(frame) if not source.tree: return {} @@ -103,16 +101,14 @@ def pure_eval_frame(frame): evaluator = pure_eval.Evaluator.from_frame(frame) expressions = evaluator.interesting_expressions_grouped(scope) - def closeness(expression): - # type: (Tuple[List[Any], Any]) -> Tuple[int, int] + def closeness(expression: Tuple[List[Any], Any]) -> Tuple[int, int]: # 
Prioritise expressions with a node closer to the statement executed # without being after that statement # A higher return value is better - the expression will appear # earlier in the list of values and is less likely to be trimmed nodes, _value = expression - def start(n): - # type: (ast.expr) -> Tuple[int, int] + def start(n: ast.expr) -> Tuple[int, int]: return (n.lineno, n.col_offset) nodes_before_stmt = [ diff --git a/sentry_sdk/integrations/pymongo.py b/sentry_sdk/integrations/pymongo.py index f65ad73687..2fc967a812 100644 --- a/sentry_sdk/integrations/pymongo.py +++ b/sentry_sdk/integrations/pymongo.py @@ -1,12 +1,12 @@ +from __future__ import annotations import copy -import json import sentry_sdk from sentry_sdk.consts import SPANSTATUS, SPANDATA, OP from sentry_sdk.integrations import DidNotEnable, Integration from sentry_sdk.scope import should_send_default_pii from sentry_sdk.tracing import Span -from sentry_sdk.utils import capture_internal_exceptions +from sentry_sdk.utils import capture_internal_exceptions, _serialize_span_attribute try: from pymongo import monitoring @@ -42,8 +42,7 @@ ] -def _strip_pii(command): - # type: (Dict[str, Any]) -> Dict[str, Any] +def _strip_pii(command: Dict[str, Any]) -> Dict[str, Any]: for key in command: is_safe_field = key in SAFE_COMMAND_ATTRIBUTES if is_safe_field: @@ -85,8 +84,7 @@ def _strip_pii(command): return command -def _get_db_data(event): - # type: (Any) -> Dict[str, Any] +def _get_db_data(event: Any) -> Dict[str, Any]: data = {} data[SPANDATA.DB_SYSTEM] = "mongodb" @@ -107,16 +105,16 @@ def _get_db_data(event): class CommandTracer(monitoring.CommandListener): - def __init__(self): - # type: () -> None - self._ongoing_operations = {} # type: Dict[int, Span] + def __init__(self) -> None: + self._ongoing_operations: Dict[int, Span] = {} - def _operation_key(self, event): - # type: (Union[CommandFailedEvent, CommandStartedEvent, CommandSucceededEvent]) -> int + def _operation_key( + self, + event: Union[CommandFailedEvent, CommandStartedEvent, CommandSucceededEvent], + ) -> int: return event.request_id - def started(self, event): - # type: (CommandStartedEvent) -> None + def started(self, event: CommandStartedEvent) -> None: if sentry_sdk.get_client().get_integration(PyMongoIntegration) is None: return @@ -127,60 +125,53 @@ def started(self, event): command.pop("$clusterTime", None) command.pop("$signature", None) - tags = { - "db.name": event.database_name, + data = { + SPANDATA.DB_NAME: event.database_name, SPANDATA.DB_SYSTEM: "mongodb", SPANDATA.DB_OPERATION: event.command_name, SPANDATA.DB_MONGODB_COLLECTION: command.get(event.command_name), } try: - tags["net.peer.name"] = event.connection_id[0] - tags["net.peer.port"] = str(event.connection_id[1]) + data["net.peer.name"] = event.connection_id[0] + data["net.peer.port"] = str(event.connection_id[1]) except TypeError: pass - data = {"operation_ids": {}} # type: Dict[str, Any] - data["operation_ids"]["operation"] = event.operation_id - data["operation_ids"]["request"] = event.request_id - - data.update(_get_db_data(event)) - try: lsid = command.pop("lsid")["id"] - data["operation_ids"]["session"] = str(lsid) + data["session_id"] = str(lsid) except KeyError: pass if not should_send_default_pii(): command = _strip_pii(command) - query = json.dumps(command, default=str) + query = _serialize_span_attribute(command) span = sentry_sdk.start_span( op=OP.DB, name=query, origin=PyMongoIntegration.origin, + only_as_child_span=True, ) - for tag, value in tags.items(): - # set the tag for 
backwards-compatibility. - # TODO: remove the set_tag call in the next major release! - span.set_tag(tag, value) - - span.set_data(tag, value) - - for key, value in data.items(): - span.set_data(key, value) - with capture_internal_exceptions(): sentry_sdk.add_breadcrumb( - message=query, category="query", type=OP.DB, data=tags + message=query, category="query", type=OP.DB, data=data ) + for key, value in data.items(): + span.set_attribute(key, value) + + for key, value in _get_db_data(event).items(): + span.set_attribute(key, value) + + span.set_attribute("operation_id", event.operation_id) + span.set_attribute("request_id", event.request_id) + self._ongoing_operations[self._operation_key(event)] = span.__enter__() - def failed(self, event): - # type: (CommandFailedEvent) -> None + def failed(self, event: CommandFailedEvent) -> None: if sentry_sdk.get_client().get_integration(PyMongoIntegration) is None: return @@ -191,8 +182,7 @@ def failed(self, event): except KeyError: return - def succeeded(self, event): - # type: (CommandSucceededEvent) -> None + def succeeded(self, event: CommandSucceededEvent) -> None: if sentry_sdk.get_client().get_integration(PyMongoIntegration) is None: return @@ -209,6 +199,5 @@ class PyMongoIntegration(Integration): origin = f"auto.db.{identifier}" @staticmethod - def setup_once(): - # type: () -> None + def setup_once() -> None: monitoring.register(CommandTracer()) diff --git a/sentry_sdk/integrations/pyramid.py b/sentry_sdk/integrations/pyramid.py index d1475ada65..68a725451a 100644 --- a/sentry_sdk/integrations/pyramid.py +++ b/sentry_sdk/integrations/pyramid.py @@ -1,14 +1,15 @@ +from __future__ import annotations import functools import os import sys import weakref import sentry_sdk +from sentry_sdk.consts import SOURCE_FOR_STYLE from sentry_sdk.integrations import Integration, DidNotEnable from sentry_sdk.integrations._wsgi_common import RequestExtractor from sentry_sdk.integrations.wsgi import SentryWsgiMiddleware from sentry_sdk.scope import should_send_default_pii -from sentry_sdk.tracing import SOURCE_FOR_STYLE from sentry_sdk.utils import ( capture_internal_exceptions, ensure_integration_enabled, @@ -40,8 +41,7 @@ if getattr(Request, "authenticated_userid", None): - def authenticated_userid(request): - # type: (Request) -> Optional[Any] + def authenticated_userid(request: Request) -> Optional[Any]: return request.authenticated_userid else: @@ -58,8 +58,7 @@ class PyramidIntegration(Integration): transaction_style = "" - def __init__(self, transaction_style="route_name"): - # type: (str) -> None + def __init__(self, transaction_style: str = "route_name") -> None: if transaction_style not in TRANSACTION_STYLE_VALUES: raise ValueError( "Invalid value for transaction_style: %s (must be in %s)" @@ -68,15 +67,15 @@ def __init__(self, transaction_style="route_name"): self.transaction_style = transaction_style @staticmethod - def setup_once(): - # type: () -> None + def setup_once() -> None: from pyramid import router old_call_view = router._call_view @functools.wraps(old_call_view) - def sentry_patched_call_view(registry, request, *args, **kwargs): - # type: (Any, Request, *Any, **Any) -> Response + def sentry_patched_call_view( + registry: Any, request: Request, *args: Any, **kwargs: Any + ) -> Response: integration = sentry_sdk.get_client().get_integration(PyramidIntegration) if integration is None: return old_call_view(registry, request, *args, **kwargs) @@ -96,8 +95,9 @@ def sentry_patched_call_view(registry, request, *args, **kwargs): if 
hasattr(Request, "invoke_exception_view"): old_invoke_exception_view = Request.invoke_exception_view - def sentry_patched_invoke_exception_view(self, *args, **kwargs): - # type: (Request, *Any, **Any) -> Any + def sentry_patched_invoke_exception_view( + self: Request, *args: Any, **kwargs: Any + ) -> Any: rv = old_invoke_exception_view(self, *args, **kwargs) if ( @@ -116,10 +116,12 @@ def sentry_patched_invoke_exception_view(self, *args, **kwargs): old_wsgi_call = router.Router.__call__ @ensure_integration_enabled(PyramidIntegration, old_wsgi_call) - def sentry_patched_wsgi_call(self, environ, start_response): - # type: (Any, Dict[str, str], Callable[..., Any]) -> _ScopedResponse - def sentry_patched_inner_wsgi_call(environ, start_response): - # type: (Dict[str, Any], Callable[..., Any]) -> Any + def sentry_patched_wsgi_call( + self: Any, environ: Dict[str, str], start_response: Callable[..., Any] + ) -> _ScopedResponse: + def sentry_patched_inner_wsgi_call( + environ: Dict[str, Any], start_response: Callable[..., Any] + ) -> Any: try: return old_wsgi_call(self, environ, start_response) except Exception: @@ -137,8 +139,7 @@ def sentry_patched_inner_wsgi_call(environ, start_response): @ensure_integration_enabled(PyramidIntegration) -def _capture_exception(exc_info): - # type: (ExcInfo) -> None +def _capture_exception(exc_info: ExcInfo) -> None: if exc_info[0] is None or issubclass(exc_info[0], HTTPException): return @@ -151,8 +152,9 @@ def _capture_exception(exc_info): sentry_sdk.capture_event(event, hint=hint) -def _set_transaction_name_and_source(scope, transaction_style, request): - # type: (sentry_sdk.Scope, str, Request) -> None +def _set_transaction_name_and_source( + scope: sentry_sdk.Scope, transaction_style: str, request: Request +) -> None: try: name_for_style = { "route_name": request.matched_route.name, @@ -167,40 +169,33 @@ def _set_transaction_name_and_source(scope, transaction_style, request): class PyramidRequestExtractor(RequestExtractor): - def url(self): - # type: () -> str + def url(self) -> str: return self.request.path_url - def env(self): - # type: () -> Dict[str, str] + def env(self) -> Dict[str, str]: return self.request.environ - def cookies(self): - # type: () -> RequestCookies + def cookies(self) -> RequestCookies: return self.request.cookies - def raw_data(self): - # type: () -> str + def raw_data(self) -> str: return self.request.text - def form(self): - # type: () -> Dict[str, str] + def form(self) -> Dict[str, str]: return { key: value for key, value in self.request.POST.items() if not getattr(value, "filename", None) } - def files(self): - # type: () -> Dict[str, _FieldStorageWithFile] + def files(self) -> Dict[str, _FieldStorageWithFile]: return { key: value for key, value in self.request.POST.items() if getattr(value, "filename", None) } - def size_of_file(self, postdata): - # type: (_FieldStorageWithFile) -> int + def size_of_file(self, postdata: _FieldStorageWithFile) -> int: file = postdata.file try: return os.fstat(file.fileno()).st_size @@ -208,10 +203,10 @@ def size_of_file(self, postdata): return 0 -def _make_event_processor(weak_request, integration): - # type: (Callable[[], Request], PyramidIntegration) -> EventProcessor - def pyramid_event_processor(event, hint): - # type: (Event, Dict[str, Any]) -> Event +def _make_event_processor( + weak_request: Callable[[], Request], integration: PyramidIntegration +) -> EventProcessor: + def pyramid_event_processor(event: Event, hint: Dict[str, Any]) -> Event: request = weak_request() if request is None: 
return event diff --git a/sentry_sdk/integrations/quart.py b/sentry_sdk/integrations/quart.py index 51306bb4cd..eb7e117cc9 100644 --- a/sentry_sdk/integrations/quart.py +++ b/sentry_sdk/integrations/quart.py @@ -1,13 +1,14 @@ +from __future__ import annotations import asyncio import inspect from functools import wraps import sentry_sdk +from sentry_sdk.consts import SOURCE_FOR_STYLE from sentry_sdk.integrations import DidNotEnable, Integration from sentry_sdk.integrations._wsgi_common import _filter_headers from sentry_sdk.integrations.asgi import SentryAsgiMiddleware from sentry_sdk.scope import should_send_default_pii -from sentry_sdk.tracing import SOURCE_FOR_STYLE from sentry_sdk.utils import ( capture_internal_exceptions, ensure_integration_enabled, @@ -60,8 +61,7 @@ class QuartIntegration(Integration): transaction_style = "" - def __init__(self, transaction_style="endpoint"): - # type: (str) -> None + def __init__(self, transaction_style: str = "endpoint") -> None: if transaction_style not in TRANSACTION_STYLE_VALUES: raise ValueError( "Invalid value for transaction_style: %s (must be in %s)" @@ -70,8 +70,7 @@ def __init__(self, transaction_style="endpoint"): self.transaction_style = transaction_style @staticmethod - def setup_once(): - # type: () -> None + def setup_once() -> None: request_started.connect(_request_websocket_started) websocket_started.connect(_request_websocket_started) @@ -83,12 +82,12 @@ def setup_once(): patch_scaffold_route() -def patch_asgi_app(): - # type: () -> None +def patch_asgi_app() -> None: old_app = Quart.__call__ - async def sentry_patched_asgi_app(self, scope, receive, send): - # type: (Any, Any, Any, Any) -> Any + async def sentry_patched_asgi_app( + self: Any, scope: Any, receive: Any, send: Any + ) -> Any: if sentry_sdk.get_client().get_integration(QuartIntegration) is None: return await old_app(self, scope, receive, send) @@ -102,16 +101,13 @@ async def sentry_patched_asgi_app(self, scope, receive, send): Quart.__call__ = sentry_patched_asgi_app -def patch_scaffold_route(): - # type: () -> None +def patch_scaffold_route() -> None: old_route = Scaffold.route - def _sentry_route(*args, **kwargs): - # type: (*Any, **Any) -> Any + def _sentry_route(*args: Any, **kwargs: Any) -> Any: old_decorator = old_route(*args, **kwargs) - def decorator(old_func): - # type: (Any) -> Any + def decorator(old_func: Any) -> Any: if inspect.isfunction(old_func) and not asyncio.iscoroutinefunction( old_func @@ -119,11 +115,10 @@ def decorator(old_func): @wraps(old_func) @ensure_integration_enabled(QuartIntegration, old_func) - def _sentry_func(*args, **kwargs): - # type: (*Any, **Any) -> Any + def _sentry_func(*args: Any, **kwargs: Any) -> Any: current_scope = sentry_sdk.get_current_scope() - if current_scope.transaction is not None: - current_scope.transaction.update_active_thread() + if current_scope.root_span is not None: + current_scope.root_span.update_active_thread() sentry_scope = sentry_sdk.get_isolation_scope() if sentry_scope.profile is not None: @@ -140,8 +135,9 @@ def _sentry_func(*args, **kwargs): Scaffold.route = _sentry_route -def _set_transaction_name_and_source(scope, transaction_style, request): - # type: (sentry_sdk.Scope, str, Request) -> None +def _set_transaction_name_and_source( + scope: sentry_sdk.Scope, transaction_style: str, request: Request +) -> None: try: name_for_style = { @@ -156,8 +152,7 @@ def _set_transaction_name_and_source(scope, transaction_style, request): pass -async def _request_websocket_started(app, **kwargs): - # type: (Quart, 
**Any) -> None +async def _request_websocket_started(app: Quart, **kwargs: Any) -> None: integration = sentry_sdk.get_client().get_integration(QuartIntegration) if integration is None: return @@ -178,10 +173,10 @@ async def _request_websocket_started(app, **kwargs): scope.add_event_processor(evt_processor) -def _make_request_event_processor(app, request, integration): - # type: (Quart, Request, QuartIntegration) -> EventProcessor - def inner(event, hint): - # type: (Event, dict[str, Any]) -> Event +def _make_request_event_processor( + app: Quart, request: Request, integration: QuartIntegration +) -> EventProcessor: + def inner(event: Event, hint: dict[str, Any]) -> Event: # if the request is gone we are fine not logging the data from # it. This might happen if the processor is pushed away to # another thread. @@ -207,8 +202,9 @@ def inner(event, hint): return inner -async def _capture_exception(sender, exception, **kwargs): - # type: (Quart, Union[ValueError, BaseException], **Any) -> None +async def _capture_exception( + sender: Quart, exception: Union[ValueError, BaseException], **kwargs: Any +) -> None: integration = sentry_sdk.get_client().get_integration(QuartIntegration) if integration is None: return @@ -222,8 +218,7 @@ async def _capture_exception(sender, exception, **kwargs): sentry_sdk.capture_event(event, hint=hint) -def _add_user_to_event(event): - # type: (Event) -> None +def _add_user_to_event(event: Event) -> None: if quart_auth is None: return diff --git a/sentry_sdk/integrations/ray.py b/sentry_sdk/integrations/ray.py index 8d6cdc1201..2879dcb9fd 100644 --- a/sentry_sdk/integrations/ray.py +++ b/sentry_sdk/integrations/ray.py @@ -1,3 +1,4 @@ +from __future__ import annotations import inspect import sys @@ -26,9 +27,10 @@ from typing import Any, Optional from sentry_sdk.utils import ExcInfo +DEFAULT_TRANSACTION_NAME = "unknown Ray function" -def _check_sentry_initialized(): - # type: () -> None + +def _check_sentry_initialized() -> None: if sentry_sdk.get_client().is_active(): return @@ -37,13 +39,13 @@ def _check_sentry_initialized(): ) -def _patch_ray_remote(): - # type: () -> None +def _patch_ray_remote() -> None: old_remote = ray.remote @functools.wraps(old_remote) - def new_remote(f=None, *args, **kwargs): - # type: (Optional[Callable[..., Any]], *Any, **Any) -> Callable[..., Any] + def new_remote( + f: Optional[Callable[..., Any]] = None, *args: Any, **kwargs: Any + ) -> Callable[..., Any]: if inspect.isclass(f): # Ray Actors @@ -52,31 +54,36 @@ def new_remote(f=None, *args, **kwargs): # (Only Ray Tasks are supported) return old_remote(f, *args, **kwargs) - def wrapper(user_f): - # type: (Callable[..., Any]) -> Any - def new_func(*f_args, _tracing=None, **f_kwargs): - # type: (Any, Optional[dict[str, Any]], Any) -> Any + def wrapper(user_f: Callable[..., Any]) -> Any: + def new_func( + *f_args: Any, _tracing: Optional[dict[str, Any]] = None, **f_kwargs: Any + ) -> Any: _check_sentry_initialized() - transaction = sentry_sdk.continue_trace( - _tracing or {}, - op=OP.QUEUE_TASK_RAY, - name=qualname_from_function(user_f), - origin=RayIntegration.origin, + root_span_name = ( + qualname_from_function(user_f) or DEFAULT_TRANSACTION_NAME + ) + sentry_sdk.get_current_scope().set_transaction_name( + root_span_name, source=TransactionSource.TASK, ) - - with sentry_sdk.start_transaction(transaction) as transaction: - try: - result = user_f(*f_args, **f_kwargs) - transaction.set_status(SPANSTATUS.OK) - except Exception: - transaction.set_status(SPANSTATUS.INTERNAL_ERROR) - 
exc_info = sys.exc_info() - _capture_exception(exc_info) - reraise(*exc_info) - - return result + with sentry_sdk.continue_trace(_tracing or {}): + with sentry_sdk.start_span( + op=OP.QUEUE_TASK_RAY, + name=qualname_from_function(user_f), + origin=RayIntegration.origin, + source=TransactionSource.TASK, + ) as root_span: + try: + result = user_f(*f_args, **f_kwargs) + root_span.set_status(SPANSTATUS.OK) + except Exception: + root_span.set_status(SPANSTATUS.INTERNAL_ERROR) + exc_info = sys.exc_info() + _capture_exception(exc_info) + reraise(*exc_info) + + return result if f: rv = old_remote(new_func) @@ -84,8 +91,9 @@ def new_func(*f_args, _tracing=None, **f_kwargs): rv = old_remote(*args, **kwargs)(new_func) old_remote_method = rv.remote - def _remote_method_with_header_propagation(*args, **kwargs): - # type: (*Any, **Any) -> Any + def _remote_method_with_header_propagation( + *args: Any, **kwargs: Any + ) -> Any: """ Ray Client """ @@ -93,6 +101,7 @@ def _remote_method_with_header_propagation(*args, **kwargs): op=OP.QUEUE_SUBMIT_RAY, name=qualname_from_function(user_f), origin=RayIntegration.origin, + only_as_child_span=True, ) as span: tracing = { k: v @@ -121,8 +130,7 @@ def _remote_method_with_header_propagation(*args, **kwargs): ray.remote = new_remote -def _capture_exception(exc_info, **kwargs): - # type: (ExcInfo, **Any) -> None +def _capture_exception(exc_info: ExcInfo, **kwargs: Any) -> None: client = sentry_sdk.get_client() event, hint = event_from_exception( @@ -141,8 +149,7 @@ class RayIntegration(Integration): origin = f"auto.queue.{identifier}" @staticmethod - def setup_once(): - # type: () -> None + def setup_once() -> None: version = package_version("ray") _check_minimum_version(RayIntegration, version) diff --git a/sentry_sdk/integrations/redis/__init__.py b/sentry_sdk/integrations/redis/__init__.py index f443138295..1d0b39f1cb 100644 --- a/sentry_sdk/integrations/redis/__init__.py +++ b/sentry_sdk/integrations/redis/__init__.py @@ -1,3 +1,4 @@ +from __future__ import annotations from sentry_sdk.integrations import Integration, DidNotEnable from sentry_sdk.integrations.redis.consts import _DEFAULT_MAX_DATA_SIZE from sentry_sdk.integrations.redis.rb import _patch_rb @@ -15,14 +16,16 @@ class RedisIntegration(Integration): identifier = "redis" - def __init__(self, max_data_size=_DEFAULT_MAX_DATA_SIZE, cache_prefixes=None): - # type: (int, Optional[list[str]]) -> None + def __init__( + self, + max_data_size: int = _DEFAULT_MAX_DATA_SIZE, + cache_prefixes: Optional[list[str]] = None, + ) -> None: self.max_data_size = max_data_size self.cache_prefixes = cache_prefixes if cache_prefixes is not None else [] @staticmethod - def setup_once(): - # type: () -> None + def setup_once() -> None: try: from redis import StrictRedis, client except ImportError: diff --git a/sentry_sdk/integrations/redis/_async_common.py b/sentry_sdk/integrations/redis/_async_common.py index b96986fba3..c01672afed 100644 --- a/sentry_sdk/integrations/redis/_async_common.py +++ b/sentry_sdk/integrations/redis/_async_common.py @@ -1,16 +1,18 @@ +from __future__ import annotations import sentry_sdk from sentry_sdk.consts import OP from sentry_sdk.integrations.redis.consts import SPAN_ORIGIN from sentry_sdk.integrations.redis.modules.caches import ( _compile_cache_span_properties, - _set_cache_data, + _get_cache_data, ) from sentry_sdk.integrations.redis.modules.queries import _compile_db_span_properties from sentry_sdk.integrations.redis.utils import ( - _set_client_data, - _set_pipeline_data, + 
_create_breadcrumb, + _get_client_data, + _get_pipeline_data, + _update_span, ) -from sentry_sdk.tracing import Span from sentry_sdk.utils import capture_internal_exceptions from typing import TYPE_CHECKING @@ -23,15 +25,16 @@ def patch_redis_async_pipeline( - pipeline_cls, is_cluster, get_command_args_fn, set_db_data_fn -): - # type: (Union[type[Pipeline[Any]], type[ClusterPipeline[Any]]], bool, Any, Callable[[Span, Any], None]) -> None + pipeline_cls: Union[type[Pipeline[Any]], type[ClusterPipeline[Any]]], + is_cluster: bool, + get_command_args_fn: Any, + get_db_data_fn: Callable[[Any], dict[str, Any]], +) -> None: old_execute = pipeline_cls.execute from sentry_sdk.integrations.redis import RedisIntegration - async def _sentry_execute(self, *args, **kwargs): - # type: (Any, *Any, **Any) -> Any + async def _sentry_execute(self: Any, *args: Any, **kwargs: Any) -> Any: if sentry_sdk.get_client().get_integration(RedisIntegration) is None: return await old_execute(self, *args, **kwargs) @@ -39,8 +42,11 @@ async def _sentry_execute(self, *args, **kwargs): op=OP.DB_REDIS, name="redis.pipeline.execute", origin=SPAN_ORIGIN, + only_as_child_span=True, ) as span: with capture_internal_exceptions(): + span_data = get_db_data_fn(self) + try: command_seq = self._execution_strategy._command_queue except AttributeError: @@ -49,28 +55,32 @@ async def _sentry_execute(self, *args, **kwargs): else: command_seq = self.command_stack - set_db_data_fn(span, self) - _set_pipeline_data( - span, - is_cluster, - get_command_args_fn, - False if is_cluster else self.is_transaction, - command_seq, + pipeline_data = _get_pipeline_data( + is_cluster=is_cluster, + get_command_args_fn=get_command_args_fn, + is_transaction=False if is_cluster else self.is_transaction, + command_seq=command_seq, ) + _update_span(span, span_data, pipeline_data) + _create_breadcrumb("redis.pipeline.execute", span_data, pipeline_data) return await old_execute(self, *args, **kwargs) pipeline_cls.execute = _sentry_execute # type: ignore -def patch_redis_async_client(cls, is_cluster, set_db_data_fn): - # type: (Union[type[StrictRedis[Any]], type[RedisCluster[Any]]], bool, Callable[[Span, Any], None]) -> None +def patch_redis_async_client( + cls: Union[type[StrictRedis[Any]], type[RedisCluster[Any]]], + is_cluster: bool, + get_db_data_fn: Callable[[Any], dict[str, Any]], +) -> None: old_execute_command = cls.execute_command from sentry_sdk.integrations.redis import RedisIntegration - async def _sentry_execute_command(self, name, *args, **kwargs): - # type: (Any, str, *Any, **Any) -> Any + async def _sentry_execute_command( + self: Any, name: str, *args: Any, **kwargs: Any + ) -> Any: integration = sentry_sdk.get_client().get_integration(RedisIntegration) if integration is None: return await old_execute_command(self, name, *args, **kwargs) @@ -88,6 +98,7 @@ async def _sentry_execute_command(self, name, *args, **kwargs): op=cache_properties["op"], name=cache_properties["description"], origin=SPAN_ORIGIN, + only_as_child_span=True, ) cache_span.__enter__() @@ -97,18 +108,24 @@ async def _sentry_execute_command(self, name, *args, **kwargs): op=db_properties["op"], name=db_properties["description"], origin=SPAN_ORIGIN, + only_as_child_span=True, ) db_span.__enter__() - set_db_data_fn(db_span, self) - _set_client_data(db_span, is_cluster, name, *args) + db_span_data = get_db_data_fn(self) + db_client_span_data = _get_client_data(is_cluster, name, *args) + _update_span(db_span, db_span_data, db_client_span_data) + _create_breadcrumb( + 
db_properties["description"], db_span_data, db_client_span_data + ) value = await old_execute_command(self, name, *args, **kwargs) db_span.__exit__(None, None, None) if cache_span: - _set_cache_data(cache_span, self, cache_properties, value) + cache_span_data = _get_cache_data(self, cache_properties, value) + _update_span(cache_span, cache_span_data) cache_span.__exit__(None, None, None) return value diff --git a/sentry_sdk/integrations/redis/_sync_common.py b/sentry_sdk/integrations/redis/_sync_common.py index 72f3eb7778..93a9a04463 100644 --- a/sentry_sdk/integrations/redis/_sync_common.py +++ b/sentry_sdk/integrations/redis/_sync_common.py @@ -1,16 +1,18 @@ +from __future__ import annotations import sentry_sdk from sentry_sdk.consts import OP from sentry_sdk.integrations.redis.consts import SPAN_ORIGIN from sentry_sdk.integrations.redis.modules.caches import ( _compile_cache_span_properties, - _set_cache_data, + _get_cache_data, ) from sentry_sdk.integrations.redis.modules.queries import _compile_db_span_properties from sentry_sdk.integrations.redis.utils import ( - _set_client_data, - _set_pipeline_data, + _create_breadcrumb, + _get_client_data, + _get_pipeline_data, + _update_span, ) -from sentry_sdk.tracing import Span from sentry_sdk.utils import capture_internal_exceptions from typing import TYPE_CHECKING @@ -21,18 +23,16 @@ def patch_redis_pipeline( - pipeline_cls, - is_cluster, - get_command_args_fn, - set_db_data_fn, -): - # type: (Any, bool, Any, Callable[[Span, Any], None]) -> None + pipeline_cls: Any, + is_cluster: bool, + get_command_args_fn: Any, + get_db_data_fn: Callable[[Any], dict[str, Any]], +) -> None: old_execute = pipeline_cls.execute from sentry_sdk.integrations.redis import RedisIntegration - def sentry_patched_execute(self, *args, **kwargs): - # type: (Any, *Any, **Any) -> Any + def sentry_patched_execute(self: Any, *args: Any, **kwargs: Any) -> Any: if sentry_sdk.get_client().get_integration(RedisIntegration) is None: return old_execute(self, *args, **kwargs) @@ -40,30 +40,32 @@ def sentry_patched_execute(self, *args, **kwargs): op=OP.DB_REDIS, name="redis.pipeline.execute", origin=SPAN_ORIGIN, + only_as_child_span=True, ) as span: with capture_internal_exceptions(): - command_seq = None try: command_seq = self._execution_strategy.command_queue except AttributeError: command_seq = self.command_stack - set_db_data_fn(span, self) - _set_pipeline_data( - span, - is_cluster, - get_command_args_fn, - False if is_cluster else self.transaction, - command_seq, + span_data = get_db_data_fn(self) + pipeline_data = _get_pipeline_data( + is_cluster=is_cluster, + get_command_args_fn=get_command_args_fn, + is_transaction=False if is_cluster else self.transaction, + command_seq=command_seq, ) + _update_span(span, span_data, pipeline_data) + _create_breadcrumb("redis.pipeline.execute", span_data, pipeline_data) return old_execute(self, *args, **kwargs) pipeline_cls.execute = sentry_patched_execute -def patch_redis_client(cls, is_cluster, set_db_data_fn): - # type: (Any, bool, Callable[[Span, Any], None]) -> None +def patch_redis_client( + cls: Any, is_cluster: bool, get_db_data_fn: Callable[[Any], dict[str, Any]] +) -> None: """ This function can be used to instrument custom redis client classes or subclasses. 
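As the docstring above notes, patch_redis_client can also instrument custom redis client classes. A minimal sketch of a call under the new signature, mirroring the _patch_rb call sites later in this patch; MyRedisClient is a hypothetical subclass introduced here only for illustration:

import redis

from sentry_sdk.integrations.redis._sync_common import patch_redis_client
from sentry_sdk.integrations.redis.modules.queries import _get_db_data


class MyRedisClient(redis.Redis):
    """Hypothetical custom client; any class with an execute_command() method works."""


# After patching, each MyRedisClient.execute_command() call is wrapped in a db span;
# the stock _get_db_data helper reads connection metadata (db, host, port) from the
# instance's connection_pool.connection_kwargs.
patch_redis_client(
    MyRedisClient,
    is_cluster=False,
    get_db_data_fn=_get_db_data,
)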
@@ -72,8 +74,9 @@ def patch_redis_client(cls, is_cluster, set_db_data_fn): from sentry_sdk.integrations.redis import RedisIntegration - def sentry_patched_execute_command(self, name, *args, **kwargs): - # type: (Any, str, *Any, **Any) -> Any + def sentry_patched_execute_command( + self: Any, name: str, *args: Any, **kwargs: Any + ) -> Any: integration = sentry_sdk.get_client().get_integration(RedisIntegration) if integration is None: return old_execute_command(self, name, *args, **kwargs) @@ -91,6 +94,7 @@ def sentry_patched_execute_command(self, name, *args, **kwargs): op=cache_properties["op"], name=cache_properties["description"], origin=SPAN_ORIGIN, + only_as_child_span=True, ) cache_span.__enter__() @@ -100,18 +104,24 @@ def sentry_patched_execute_command(self, name, *args, **kwargs): op=db_properties["op"], name=db_properties["description"], origin=SPAN_ORIGIN, + only_as_child_span=True, ) db_span.__enter__() - set_db_data_fn(db_span, self) - _set_client_data(db_span, is_cluster, name, *args) + db_span_data = get_db_data_fn(self) + db_client_span_data = _get_client_data(is_cluster, name, *args) + _update_span(db_span, db_span_data, db_client_span_data) + _create_breadcrumb( + db_properties["description"], db_span_data, db_client_span_data + ) value = old_execute_command(self, name, *args, **kwargs) db_span.__exit__(None, None, None) if cache_span: - _set_cache_data(cache_span, self, cache_properties, value) + cache_span_data = _get_cache_data(self, cache_properties, value) + _update_span(cache_span, cache_span_data) cache_span.__exit__(None, None, None) return value diff --git a/sentry_sdk/integrations/redis/modules/caches.py b/sentry_sdk/integrations/redis/modules/caches.py index c6fc19f5b2..574c928f12 100644 --- a/sentry_sdk/integrations/redis/modules/caches.py +++ b/sentry_sdk/integrations/redis/modules/caches.py @@ -2,6 +2,7 @@ Code used for the Caches module in Sentry """ +from __future__ import annotations from sentry_sdk.consts import OP, SPANDATA from sentry_sdk.integrations.redis.utils import _get_safe_key, _key_as_string from sentry_sdk.utils import capture_internal_exceptions @@ -13,12 +14,10 @@ if TYPE_CHECKING: from sentry_sdk.integrations.redis import RedisIntegration - from sentry_sdk.tracing import Span from typing import Any, Optional -def _get_op(name): - # type: (str) -> Optional[str] +def _get_op(name: str) -> Optional[str]: op = None if name.lower() in GET_COMMANDS: op = OP.CACHE_GET @@ -28,8 +27,12 @@ def _get_op(name): return op -def _compile_cache_span_properties(redis_command, args, kwargs, integration): - # type: (str, tuple[Any, ...], dict[str, Any], RedisIntegration) -> dict[str, Any] +def _compile_cache_span_properties( + redis_command: str, + args: tuple[Any, ...], + kwargs: dict[str, Any], + integration: RedisIntegration, +) -> dict[str, Any]: key = _get_safe_key(redis_command, args, kwargs) key_as_string = _key_as_string(key) keys_as_string = key_as_string.split(", ") @@ -62,8 +65,12 @@ def _compile_cache_span_properties(redis_command, args, kwargs, integration): return properties -def _get_cache_span_description(redis_command, args, kwargs, integration): - # type: (str, tuple[Any, ...], dict[str, Any], RedisIntegration) -> str +def _get_cache_span_description( + redis_command: str, + args: tuple[Any, ...], + kwargs: dict[str, Any], + integration: RedisIntegration, +) -> str: description = _key_as_string(_get_safe_key(redis_command, args, kwargs)) data_should_be_truncated = ( @@ -75,22 +82,25 @@ def _get_cache_span_description(redis_command, args, 
kwargs, integration): return description -def _set_cache_data(span, redis_client, properties, return_value): - # type: (Span, Any, dict[str, Any], Optional[Any]) -> None +def _get_cache_data( + redis_client: Any, properties: dict[str, Any], return_value: Optional[Any] +) -> dict[str, Any]: + data = {} + with capture_internal_exceptions(): - span.set_data(SPANDATA.CACHE_KEY, properties["key"]) + data[SPANDATA.CACHE_KEY] = properties["key"] if properties["redis_command"] in GET_COMMANDS: if return_value is not None: - span.set_data(SPANDATA.CACHE_HIT, True) + data[SPANDATA.CACHE_HIT] = True size = ( len(str(return_value).encode("utf-8")) if not isinstance(return_value, bytes) else len(return_value) ) - span.set_data(SPANDATA.CACHE_ITEM_SIZE, size) + data[SPANDATA.CACHE_ITEM_SIZE] = size else: - span.set_data(SPANDATA.CACHE_HIT, False) + data[SPANDATA.CACHE_HIT] = False elif properties["redis_command"] in SET_COMMANDS: if properties["value"] is not None: @@ -99,7 +109,7 @@ def _set_cache_data(span, redis_client, properties, return_value): if not isinstance(properties["value"], bytes) else len(properties["value"]) ) - span.set_data(SPANDATA.CACHE_ITEM_SIZE, size) + data[SPANDATA.CACHE_ITEM_SIZE] = size try: connection_params = redis_client.connection_pool.connection_kwargs @@ -114,8 +124,10 @@ def _set_cache_data(span, redis_client, properties, return_value): host = connection_params.get("host") if host is not None: - span.set_data(SPANDATA.NETWORK_PEER_ADDRESS, host) + data[SPANDATA.NETWORK_PEER_ADDRESS] = host port = connection_params.get("port") if port is not None: - span.set_data(SPANDATA.NETWORK_PEER_PORT, port) + data[SPANDATA.NETWORK_PEER_PORT] = port + + return data diff --git a/sentry_sdk/integrations/redis/modules/queries.py b/sentry_sdk/integrations/redis/modules/queries.py index e0d85a4ef7..312d48e2bd 100644 --- a/sentry_sdk/integrations/redis/modules/queries.py +++ b/sentry_sdk/integrations/redis/modules/queries.py @@ -2,6 +2,7 @@ Code used for the Queries module in Sentry """ +from __future__ import annotations from sentry_sdk.consts import OP, SPANDATA from sentry_sdk.integrations.redis.utils import _get_safe_command from sentry_sdk.utils import capture_internal_exceptions @@ -11,12 +12,12 @@ if TYPE_CHECKING: from redis import Redis from sentry_sdk.integrations.redis import RedisIntegration - from sentry_sdk.tracing import Span from typing import Any -def _compile_db_span_properties(integration, redis_command, args): - # type: (RedisIntegration, str, tuple[Any, ...]) -> dict[str, Any] +def _compile_db_span_properties( + integration: RedisIntegration, redis_command: str, args: tuple[Any, ...] +) -> dict[str, Any]: description = _get_db_span_description(integration, redis_command, args) properties = { @@ -27,8 +28,9 @@ def _compile_db_span_properties(integration, redis_command, args): return properties -def _get_db_span_description(integration, command_name, args): - # type: (RedisIntegration, str, tuple[Any, ...]) -> str +def _get_db_span_description( + integration: RedisIntegration, command_name: str, args: tuple[Any, ...] 
+) -> str: description = command_name with capture_internal_exceptions(): @@ -43,26 +45,28 @@ def _get_db_span_description(integration, command_name, args): return description -def _set_db_data_on_span(span, connection_params): - # type: (Span, dict[str, Any]) -> None - span.set_data(SPANDATA.DB_SYSTEM, "redis") +def _get_connection_data(connection_params: dict[str, Any]) -> dict[str, Any]: + data = { + SPANDATA.DB_SYSTEM: "redis", + } db = connection_params.get("db") if db is not None: - span.set_data(SPANDATA.DB_NAME, str(db)) + data[SPANDATA.DB_NAME] = str(db) host = connection_params.get("host") if host is not None: - span.set_data(SPANDATA.SERVER_ADDRESS, host) + data[SPANDATA.SERVER_ADDRESS] = host port = connection_params.get("port") if port is not None: - span.set_data(SPANDATA.SERVER_PORT, port) + data[SPANDATA.SERVER_PORT] = port + + return data -def _set_db_data(span, redis_instance): - # type: (Span, Redis[Any]) -> None +def _get_db_data(redis_instance: Redis[Any]) -> dict[str, Any]: try: - _set_db_data_on_span(span, redis_instance.connection_pool.connection_kwargs) + return _get_connection_data(redis_instance.connection_pool.connection_kwargs) except AttributeError: - pass # connections_kwargs may be missing in some cases + return {} # connection_kwargs may be missing in some cases diff --git a/sentry_sdk/integrations/redis/rb.py b/sentry_sdk/integrations/redis/rb.py index 1b3e2e530c..b6eab57171 100644 --- a/sentry_sdk/integrations/redis/rb.py +++ b/sentry_sdk/integrations/redis/rb.py @@ -4,12 +4,13 @@ https://github.com/getsentry/rb """ +from __future__ import annotations + from sentry_sdk.integrations.redis._sync_common import patch_redis_client -from sentry_sdk.integrations.redis.modules.queries import _set_db_data +from sentry_sdk.integrations.redis.modules.queries import _get_db_data -def _patch_rb(): - # type: () -> None +def _patch_rb() -> None: try: import rb.clients # type: ignore except ImportError: @@ -18,15 +19,15 @@ def _patch_rb(): patch_redis_client( rb.clients.FanoutClient, is_cluster=False, - set_db_data_fn=_set_db_data, + get_db_data_fn=_get_db_data, ) patch_redis_client( rb.clients.MappingClient, is_cluster=False, - set_db_data_fn=_set_db_data, + get_db_data_fn=_get_db_data, ) patch_redis_client( rb.clients.RoutingClient, is_cluster=False, - set_db_data_fn=_set_db_data, + get_db_data_fn=_get_db_data, ) diff --git a/sentry_sdk/integrations/redis/redis.py b/sentry_sdk/integrations/redis/redis.py index c92958a32d..f7332c906b 100644 --- a/sentry_sdk/integrations/redis/redis.py +++ b/sentry_sdk/integrations/redis/redis.py @@ -4,11 +4,13 @@ https://github.com/redis/redis-py """ +from __future__ import annotations + from sentry_sdk.integrations.redis._sync_common import ( patch_redis_client, patch_redis_pipeline, ) -from sentry_sdk.integrations.redis.modules.queries import _set_db_data +from sentry_sdk.integrations.redis.modules.queries import _get_db_data from typing import TYPE_CHECKING @@ -16,23 +18,21 @@ from typing import Any, Sequence -def _get_redis_command_args(command): - # type: (Any) -> Sequence[Any] +def _get_redis_command_args(command: Any) -> Sequence[Any]: return command[0] -def _patch_redis(StrictRedis, client): # noqa: N803 - # type: (Any, Any) -> None +def _patch_redis(StrictRedis: Any, client: Any) -> None: # noqa: N803 patch_redis_client( StrictRedis, is_cluster=False, - set_db_data_fn=_set_db_data, + get_db_data_fn=_get_db_data, ) patch_redis_pipeline( client.Pipeline, is_cluster=False, get_command_args_fn=_get_redis_command_args, -
set_db_data_fn=_set_db_data, + get_db_data_fn=_get_db_data, ) try: strict_pipeline = client.StrictPipeline @@ -43,7 +43,7 @@ def _patch_redis(StrictRedis, client): # noqa: N803 strict_pipeline, is_cluster=False, get_command_args_fn=_get_redis_command_args, - set_db_data_fn=_set_db_data, + get_db_data_fn=_get_db_data, ) try: @@ -59,11 +59,11 @@ def _patch_redis(StrictRedis, client): # noqa: N803 patch_redis_async_client( redis.asyncio.client.StrictRedis, is_cluster=False, - set_db_data_fn=_set_db_data, + get_db_data_fn=_get_db_data, ) patch_redis_async_pipeline( redis.asyncio.client.Pipeline, False, _get_redis_command_args, - set_db_data_fn=_set_db_data, + get_db_data_fn=_get_db_data, ) diff --git a/sentry_sdk/integrations/redis/redis_cluster.py b/sentry_sdk/integrations/redis/redis_cluster.py index 52936d1512..3c4dfdea93 100644 --- a/sentry_sdk/integrations/redis/redis_cluster.py +++ b/sentry_sdk/integrations/redis/redis_cluster.py @@ -5,11 +5,13 @@ https://github.com/redis/redis-py/blob/master/redis/cluster.py """ +from __future__ import annotations + from sentry_sdk.integrations.redis._sync_common import ( patch_redis_client, patch_redis_pipeline, ) -from sentry_sdk.integrations.redis.modules.queries import _set_db_data_on_span +from sentry_sdk.integrations.redis.modules.queries import _get_connection_data from sentry_sdk.integrations.redis.utils import _parse_rediscluster_command from sentry_sdk.utils import capture_internal_exceptions @@ -23,18 +25,21 @@ RedisCluster as AsyncRedisCluster, ClusterPipeline as AsyncClusterPipeline, ) - from sentry_sdk.tracing import Span -def _set_async_cluster_db_data(span, async_redis_cluster_instance): - # type: (Span, AsyncRedisCluster[Any]) -> None +def _get_async_cluster_db_data( + async_redis_cluster_instance: AsyncRedisCluster[Any], +) -> dict[str, Any]: default_node = async_redis_cluster_instance.get_default_node() if default_node is not None and default_node.connection_kwargs is not None: - _set_db_data_on_span(span, default_node.connection_kwargs) + return _get_connection_data(default_node.connection_kwargs) + else: + return {} -def _set_async_cluster_pipeline_db_data(span, async_redis_cluster_pipeline_instance): - # type: (Span, AsyncClusterPipeline[Any]) -> None +def _get_async_cluster_pipeline_db_data( + async_redis_cluster_pipeline_instance: AsyncClusterPipeline[Any], +) -> dict[str, Any]: with capture_internal_exceptions(): client = getattr(async_redis_cluster_pipeline_instance, "cluster_client", None) if client is None: @@ -46,14 +51,10 @@ def _set_async_cluster_pipeline_db_data(span, async_redis_cluster_pipeline_insta async_redis_cluster_pipeline_instance._client # type: ignore[attr-defined] ) - _set_async_cluster_db_data( - span, - client, - ) + return _get_async_cluster_db_data(client) -def _set_cluster_db_data(span, redis_cluster_instance): - # type: (Span, RedisCluster[Any]) -> None +def _get_cluster_db_data(redis_cluster_instance: RedisCluster[Any]) -> dict[str, Any]: default_node = redis_cluster_instance.get_default_node() if default_node is not None: @@ -61,11 +62,12 @@ def _set_cluster_db_data(span, redis_cluster_instance): "host": default_node.host, "port": default_node.port, } - _set_db_data_on_span(span, connection_params) + return _get_connection_data(connection_params) + else: + return {} -def _patch_redis_cluster(): - # type: () -> None +def _patch_redis_cluster() -> None: """Patches the cluster module on redis SDK (as opposed to rediscluster library)""" try: from redis import RedisCluster, cluster @@ -75,13 +77,13 @@ def 
_patch_redis_cluster(): patch_redis_client( RedisCluster, is_cluster=True, - set_db_data_fn=_set_cluster_db_data, + get_db_data_fn=_get_cluster_db_data, ) patch_redis_pipeline( cluster.ClusterPipeline, is_cluster=True, get_command_args_fn=_parse_rediscluster_command, - set_db_data_fn=_set_cluster_db_data, + get_db_data_fn=_get_cluster_db_data, ) try: @@ -97,11 +99,11 @@ def _patch_redis_cluster(): patch_redis_async_client( async_cluster.RedisCluster, is_cluster=True, - set_db_data_fn=_set_async_cluster_db_data, + get_db_data_fn=_get_async_cluster_db_data, ) patch_redis_async_pipeline( async_cluster.ClusterPipeline, is_cluster=True, get_command_args_fn=_parse_rediscluster_command, - set_db_data_fn=_set_async_cluster_pipeline_db_data, + get_db_data_fn=_get_async_cluster_pipeline_db_data, ) diff --git a/sentry_sdk/integrations/redis/redis_py_cluster_legacy.py b/sentry_sdk/integrations/redis/redis_py_cluster_legacy.py index ad1c23633f..e658443e81 100644 --- a/sentry_sdk/integrations/redis/redis_py_cluster_legacy.py +++ b/sentry_sdk/integrations/redis/redis_py_cluster_legacy.py @@ -5,16 +5,17 @@ https://github.com/grokzen/redis-py-cluster """ +from __future__ import annotations + from sentry_sdk.integrations.redis._sync_common import ( patch_redis_client, patch_redis_pipeline, ) -from sentry_sdk.integrations.redis.modules.queries import _set_db_data +from sentry_sdk.integrations.redis.modules.queries import _get_db_data from sentry_sdk.integrations.redis.utils import _parse_rediscluster_command -def _patch_rediscluster(): - # type: () -> None +def _patch_rediscluster() -> None: try: import rediscluster # type: ignore except ImportError: @@ -23,7 +24,7 @@ def _patch_rediscluster(): patch_redis_client( rediscluster.RedisCluster, is_cluster=True, - set_db_data_fn=_set_db_data, + get_db_data_fn=_get_db_data, ) # up to v1.3.6, __version__ attribute is a tuple @@ -37,7 +38,7 @@ def _patch_rediscluster(): patch_redis_client( rediscluster.StrictRedisCluster, is_cluster=True, - set_db_data_fn=_set_db_data, + get_db_data_fn=_get_db_data, ) else: pipeline_cls = rediscluster.pipeline.ClusterPipeline @@ -46,5 +47,5 @@ def _patch_rediscluster(): pipeline_cls, is_cluster=True, get_command_args_fn=_parse_rediscluster_command, - set_db_data_fn=_set_db_data, + get_db_data_fn=_get_db_data, ) diff --git a/sentry_sdk/integrations/redis/utils.py b/sentry_sdk/integrations/redis/utils.py index cf230f6648..e109d3fe34 100644 --- a/sentry_sdk/integrations/redis/utils.py +++ b/sentry_sdk/integrations/redis/utils.py @@ -1,3 +1,5 @@ +from __future__ import annotations +import sentry_sdk from sentry_sdk.consts import SPANDATA from sentry_sdk.integrations.redis.consts import ( _COMMANDS_INCLUDING_SENSITIVE_DATA, @@ -16,8 +18,46 @@ from sentry_sdk.tracing import Span -def _get_safe_command(name, args): - # type: (str, Sequence[Any]) -> str +TAG_KEYS = [ + "redis.command", + "redis.is_cluster", + "redis.key", + "redis.transaction", + SPANDATA.DB_OPERATION, +] + + +def _update_span(span: Span, *data_bags: dict[str, Any]) -> None: + """ + Set tags and data on the given span to data from the given data bags. + """ + for data in data_bags: + for key, value in data.items(): + if key in TAG_KEYS: + span.set_tag(key, value) + else: + span.set_attribute(key, value) + + +def _create_breadcrumb(message: str, *data_bags: dict[str, Any]) -> None: + """ + Create a breadcrumb containing the tags data from the given data bags. 
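+
+    Only keys listed in TAG_KEYS are copied into the breadcrumb data. A
+    minimal usage sketch (hypothetical call site, not part of this change):
+
+        data = _get_client_data(False, "GET", "mykey")
+        _create_breadcrumb("GET 'mykey'", data)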
+ """ + data = {} + for data in data_bags: + for key, value in data.items(): + if key in TAG_KEYS: + data[key] = value + + sentry_sdk.add_breadcrumb( + message=message, + type="redis", + category="redis", + data=data, + ) + + +def _get_safe_command(name: str, args: Sequence[Any]) -> str: command_parts = [name] for i, arg in enumerate(args): @@ -44,8 +84,7 @@ def _get_safe_command(name, args): return command -def _safe_decode(key): - # type: (Any) -> str +def _safe_decode(key: Any) -> str: if isinstance(key, bytes): try: return key.decode() @@ -55,8 +94,7 @@ def _safe_decode(key): return str(key) -def _key_as_string(key): - # type: (Any) -> str +def _key_as_string(key: Any) -> str: if isinstance(key, (dict, list, tuple)): key = ", ".join(_safe_decode(x) for x in key) elif isinstance(key, bytes): @@ -69,8 +107,9 @@ def _key_as_string(key): return key -def _get_safe_key(method_name, args, kwargs): - # type: (str, Optional[tuple[Any, ...]], Optional[dict[str, Any]]) -> Optional[tuple[str, ...]] +def _get_safe_key( + method_name: str, args: Optional[tuple[Any, ...]], kwargs: Optional[dict[str, Any]] +) -> Optional[tuple[str, ...]]: """ Gets the key (or keys) from the given method_name. The method_name could be a redis command or a django caching command @@ -100,49 +139,49 @@ def _get_safe_key(method_name, args, kwargs): return key -def _parse_rediscluster_command(command): - # type: (Any) -> Sequence[Any] +def _parse_rediscluster_command(command: Any) -> Sequence[Any]: return command.args -def _set_pipeline_data( - span, - is_cluster, - get_command_args_fn, - is_transaction, - commands_seq, -): - # type: (Span, bool, Any, bool, Sequence[Any]) -> None - span.set_tag("redis.is_cluster", is_cluster) - span.set_tag("redis.transaction", is_transaction) +def _get_pipeline_data( + is_cluster: bool, + get_command_args_fn: Any, + is_transaction: bool, + command_seq: Sequence[Any], +) -> dict[str, Any]: + data: dict[str, Any] = { + "redis.is_cluster": is_cluster, + "redis.transaction": is_transaction, + } commands = [] - for i, arg in enumerate(commands_seq): + for i, arg in enumerate(command_seq): if i >= _MAX_NUM_COMMANDS: break command = get_command_args_fn(arg) commands.append(_get_safe_command(command[0], command[1:])) - span.set_data( - "redis.commands", - { - "count": len(commands_seq), - "first_ten": commands, - }, - ) + data["redis.commands.count"] = len(command_seq) + data["redis.commands.first_ten"] = commands + + return data -def _set_client_data(span, is_cluster, name, *args): - # type: (Span, bool, str, *Any) -> None - span.set_tag("redis.is_cluster", is_cluster) +def _get_client_data(is_cluster: bool, name: str, *args: Any) -> dict[str, Any]: + data: dict[str, Any] = { + "redis.is_cluster": is_cluster, + } + if name: - span.set_tag("redis.command", name) - span.set_tag(SPANDATA.DB_OPERATION, name) + data["redis.command"] = name + data[SPANDATA.DB_OPERATION] = name if name and args: name_low = name.lower() if (name_low in _SINGLE_KEY_COMMANDS) or ( name_low in _MULTI_KEY_COMMANDS and len(args) == 1 ): - span.set_tag("redis.key", args[0]) + data["redis.key"] = args[0] + + return data diff --git a/sentry_sdk/integrations/rq.py b/sentry_sdk/integrations/rq.py index 6d7fcf723b..43a943c272 100644 --- a/sentry_sdk/integrations/rq.py +++ b/sentry_sdk/integrations/rq.py @@ -1,8 +1,8 @@ +from __future__ import annotations import weakref import sentry_sdk from sentry_sdk.consts import OP -from sentry_sdk.api import continue_trace from sentry_sdk.integrations import _check_minimum_version, 
DidNotEnable, Integration from sentry_sdk.integrations.logging import ignore_logger from sentry_sdk.tracing import TransactionSource @@ -33,42 +33,56 @@ from rq.job import Job +DEFAULT_TRANSACTION_NAME = "unknown RQ task" + + +JOB_PROPERTY_TO_ATTRIBUTE = { + "id": "messaging.message.id", +} + +QUEUE_PROPERTY_TO_ATTRIBUTE = { + "name": "messaging.destination.name", +} + class RqIntegration(Integration): identifier = "rq" origin = f"auto.queue.{identifier}" @staticmethod - def setup_once(): - # type: () -> None + def setup_once() -> None: version = parse_version(RQ_VERSION) _check_minimum_version(RqIntegration, version) old_perform_job = Worker.perform_job @ensure_integration_enabled(RqIntegration, old_perform_job) - def sentry_patched_perform_job(self, job, *args, **kwargs): - # type: (Any, Job, *Queue, **Any) -> bool + def sentry_patched_perform_job( + self: Any, job: Job, queue: Queue, *args: Any, **kwargs: Any + ) -> bool: with sentry_sdk.new_scope() as scope: - scope.clear_breadcrumbs() - scope.add_event_processor(_make_event_processor(weakref.ref(job))) + try: + transaction_name = job.func_name or DEFAULT_TRANSACTION_NAME + except AttributeError: + transaction_name = DEFAULT_TRANSACTION_NAME - transaction = continue_trace( - job.meta.get("_sentry_trace_headers") or {}, - op=OP.QUEUE_TASK_RQ, - name="unknown RQ task", - source=TransactionSource.TASK, - origin=RqIntegration.origin, + scope.set_transaction_name( + transaction_name, source=TransactionSource.TASK ) + scope.clear_breadcrumbs() + scope.add_event_processor(_make_event_processor(weakref.ref(job))) - with capture_internal_exceptions(): - transaction.name = job.func_name - - with sentry_sdk.start_transaction( - transaction, - custom_sampling_context={"rq_job": job}, + with sentry_sdk.continue_trace( + job.meta.get("_sentry_trace_headers") or {} ): - rv = old_perform_job(self, job, *args, **kwargs) + with sentry_sdk.start_span( + op=OP.QUEUE_TASK_RQ, + name=transaction_name, + source=TransactionSource.TASK, + origin=RqIntegration.origin, + attributes=_prepopulate_attributes(job, queue), + ): + rv = old_perform_job(self, job, queue, *args, **kwargs) if self.is_horse: # We're inside of a forked process and RQ is @@ -82,8 +96,9 @@ def sentry_patched_perform_job(self, job, *args, **kwargs): old_handle_exception = Worker.handle_exception - def sentry_patched_handle_exception(self, job, *exc_info, **kwargs): - # type: (Worker, Any, *Any, **Any) -> Any + def sentry_patched_handle_exception( + self: Worker, job: Any, *exc_info: Any, **kwargs: Any + ) -> Any: retry = ( hasattr(job, "retries_left") and job.retries_left @@ -100,13 +115,10 @@ def sentry_patched_handle_exception(self, job, *exc_info, **kwargs): old_enqueue_job = Queue.enqueue_job @ensure_integration_enabled(RqIntegration, old_enqueue_job) - def sentry_patched_enqueue_job(self, job, **kwargs): - # type: (Queue, Any, **Any) -> Any - scope = sentry_sdk.get_current_scope() - if scope.span is not None: - job.meta["_sentry_trace_headers"] = dict( - scope.iter_trace_propagation_headers() - ) + def sentry_patched_enqueue_job(self: Queue, job: Any, **kwargs: Any) -> Any: + job.meta["_sentry_trace_headers"] = dict( + sentry_sdk.get_current_scope().iter_trace_propagation_headers() + ) return old_enqueue_job(self, job, **kwargs) @@ -115,10 +127,8 @@ def sentry_patched_enqueue_job(self, job, **kwargs): ignore_logger("rq.worker") -def _make_event_processor(weak_job): - # type: (Callable[[], Job]) -> EventProcessor - def event_processor(event, hint): - # type: (Event, dict[str, Any]) -> 
Event +def _make_event_processor(weak_job: Callable[[], Job]) -> EventProcessor: + def event_processor(event: Event, hint: dict[str, Any]) -> Event: job = weak_job() if job is not None: with capture_internal_exceptions(): @@ -148,8 +158,7 @@ def event_processor(event, hint): return event_processor -def _capture_exception(exc_info, **kwargs): - # type: (ExcInfo, **Any) -> None +def _capture_exception(exc_info: ExcInfo, **kwargs: Any) -> None: client = sentry_sdk.get_client() event, hint = event_from_exception( @@ -159,3 +168,36 @@ def _capture_exception(exc_info, **kwargs): ) sentry_sdk.capture_event(event, hint=hint) + + +def _prepopulate_attributes(job: Job, queue: Queue) -> dict[str, Any]: + attributes = { + "messaging.system": "rq", + "rq.job.id": job.id, + } + + for prop, attr in JOB_PROPERTY_TO_ATTRIBUTE.items(): + if getattr(job, prop, None) is not None: + attributes[attr] = getattr(job, prop) + + for prop, attr in QUEUE_PROPERTY_TO_ATTRIBUTE.items(): + if getattr(queue, prop, None) is not None: + attributes[attr] = getattr(queue, prop) + + if getattr(job, "args", None): + for i, arg in enumerate(job.args): + with capture_internal_exceptions(): + attributes[f"rq.job.args.{i}"] = str(arg) + + if getattr(job, "kwargs", None): + for kwarg, value in job.kwargs.items(): + with capture_internal_exceptions(): + attributes[f"rq.job.kwargs.{kwarg}"] = str(value) + + func = job.func + if callable(func): + func = func.__name__ + + attributes["rq.job.func"] = str(func) + + return attributes diff --git a/sentry_sdk/integrations/rust_tracing.py b/sentry_sdk/integrations/rust_tracing.py index e4c211814f..304417036d 100644 --- a/sentry_sdk/integrations/rust_tracing.py +++ b/sentry_sdk/integrations/rust_tracing.py @@ -30,18 +30,20 @@ Each native extension requires its own integration. 
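Under the updated tracing model, the per-span state passed between hooks is
the Sentry span itself: on_new_span starts and enters a span and returns it,
on_record writes recorded values onto it as attributes, and on_close exits it.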
""" +from __future__ import annotations import json from enum import Enum, auto -from typing import Any, Callable, Dict, Tuple, Optional +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from typing import Any, Callable, Dict, Optional import sentry_sdk from sentry_sdk.integrations import Integration from sentry_sdk.scope import should_send_default_pii -from sentry_sdk.tracing import Span as SentrySpan +from sentry_sdk.tracing import Span from sentry_sdk.utils import SENSITIVE_DATA_SUBSTITUTE -TraceState = Optional[Tuple[Optional[SentrySpan], SentrySpan]] - class RustTracingLevel(Enum): Trace = "TRACE" @@ -58,8 +60,7 @@ class EventTypeMapping(Enum): Event = auto() -def tracing_level_to_sentry_level(level): - # type: (str) -> sentry_sdk._types.LogLevelStr +def tracing_level_to_sentry_level(level: str) -> sentry_sdk._types.LogLevelStr: level = RustTracingLevel(level) if level in (RustTracingLevel.Trace, RustTracingLevel.Debug): return "debug" @@ -99,15 +100,15 @@ def process_event(event: Dict[str, Any]) -> None: logger = metadata.get("target") level = tracing_level_to_sentry_level(metadata.get("level")) - message = event.get("message") # type: sentry_sdk._types.Any + message: sentry_sdk._types.Any = event.get("message") contexts = extract_contexts(event) - sentry_event = { + sentry_event: sentry_sdk._types.Event = { "logger": logger, "level": level, "message": message, "contexts": contexts, - } # type: sentry_sdk._types.Event + } sentry_sdk.capture_event(sentry_event) @@ -171,7 +172,7 @@ def _include_tracing_fields(self) -> bool: else self.include_tracing_fields ) - def on_event(self, event: str, _span_state: TraceState) -> None: + def on_event(self, event: str, _span_state: Optional[Span]) -> None: deserialized_event = json.loads(event) metadata = deserialized_event.get("metadata", {}) @@ -185,7 +186,7 @@ def on_event(self, event: str, _span_state: TraceState) -> None: elif event_type == EventTypeMapping.Event: process_event(deserialized_event) - def on_new_span(self, attrs: str, span_id: str) -> TraceState: + def on_new_span(self, attrs: str, span_id: str) -> Optional[Span]: attrs = json.loads(attrs) metadata = attrs.get("metadata", {}) @@ -205,48 +206,35 @@ def on_new_span(self, attrs: str, span_id: str) -> TraceState: else: sentry_span_name = "" - kwargs = { - "op": "function", - "name": sentry_span_name, - "origin": self.origin, - } - - scope = sentry_sdk.get_current_scope() - parent_sentry_span = scope.span - if parent_sentry_span: - sentry_span = parent_sentry_span.start_child(**kwargs) - else: - sentry_span = scope.start_span(**kwargs) + span = sentry_sdk.start_span( + op="function", + name=sentry_span_name, + origin=self.origin, + only_as_child_span=True, + ) + span.__enter__() fields = metadata.get("fields", []) for field in fields: if self._include_tracing_fields(): - sentry_span.set_data(field, attrs.get(field)) - else: - sentry_span.set_data(field, SENSITIVE_DATA_SUBSTITUTE) - - scope.span = sentry_span - return (parent_sentry_span, sentry_span) - - def on_close(self, span_id: str, span_state: TraceState) -> None: - if span_state is None: - return - - parent_sentry_span, sentry_span = span_state - sentry_span.finish() - sentry_sdk.get_current_scope().span = parent_sentry_span - - def on_record(self, span_id: str, values: str, span_state: TraceState) -> None: - if span_state is None: - return - _parent_sentry_span, sentry_span = span_state - - deserialized_values = json.loads(values) - for key, value in deserialized_values.items(): - if self._include_tracing_fields(): - 
sentry_span.set_data(key, value) + span.set_attribute(field, attrs.get(field)) else: - sentry_span.set_data(key, SENSITIVE_DATA_SUBSTITUTE) + span.set_attribute(field, SENSITIVE_DATA_SUBSTITUTE) + + return span + + def on_close(self, span_id: str, span: Optional[Span]) -> None: + if span is not None: + span.__exit__(None, None, None) + + def on_record(self, span_id: str, values: str, span: Optional[Span]) -> None: + if span is not None: + deserialized_values = json.loads(values) + for key, value in deserialized_values.items(): + if self._include_tracing_fields(): + span.set_attribute(key, value) + else: + span.set_attribute(key, SENSITIVE_DATA_SUBSTITUTE) class RustTracingIntegration(Integration): diff --git a/sentry_sdk/integrations/sanic.py b/sentry_sdk/integrations/sanic.py index bd8f1f329b..1eed090332 100644 --- a/sentry_sdk/integrations/sanic.py +++ b/sentry_sdk/integrations/sanic.py @@ -1,10 +1,10 @@ +from __future__ import annotations import sys import weakref from inspect import isawaitable from urllib.parse import urlsplit import sentry_sdk -from sentry_sdk import continue_trace from sentry_sdk.consts import OP from sentry_sdk.integrations import _check_minimum_version, Integration, DidNotEnable from sentry_sdk.integrations._wsgi_common import RequestExtractor, _filter_headers @@ -60,8 +60,9 @@ class SanicIntegration(Integration): origin = f"auto.http.{identifier}" version = None - def __init__(self, unsampled_statuses=frozenset({404})): - # type: (Optional[Container[int]]) -> None + def __init__( + self, unsampled_statuses: Optional[Container[int]] = frozenset({404}) + ) -> None: """ The unsampled_statuses parameter can be used to specify for which HTTP statuses the transactions should not be sent to Sentry. By default, transactions are sent for all @@ -71,8 +72,7 @@ def __init__(self, unsampled_statuses=frozenset({404})): self._unsampled_statuses = unsampled_statuses or set() @staticmethod - def setup_once(): - # type: () -> None + def setup_once() -> None: SanicIntegration.version = parse_version(SANIC_VERSION) _check_minimum_version(SanicIntegration, SanicIntegration.version) @@ -104,56 +104,45 @@ def setup_once(): class SanicRequestExtractor(RequestExtractor): - def content_length(self): - # type: () -> int + def content_length(self) -> int: if self.request.body is None: return 0 return len(self.request.body) - def cookies(self): - # type: () -> Dict[str, str] + def cookies(self) -> Dict[str, str]: return dict(self.request.cookies) - def raw_data(self): - # type: () -> bytes + def raw_data(self) -> bytes: return self.request.body - def form(self): - # type: () -> RequestParameters + def form(self) -> RequestParameters: return self.request.form - def is_json(self): - # type: () -> bool + def is_json(self) -> bool: raise NotImplementedError() - def json(self): - # type: () -> Optional[Any] + def json(self) -> Optional[Any]: return self.request.json - def files(self): - # type: () -> RequestParameters + def files(self) -> RequestParameters: return self.request.files - def size_of_file(self, file): - # type: (Any) -> int + def size_of_file(self, file: Any) -> int: return len(file.body or ()) -def _setup_sanic(): - # type: () -> None +def _setup_sanic() -> None: Sanic._startup = _startup ErrorHandler.lookup = _sentry_error_handler_lookup -def _setup_legacy_sanic(): - # type: () -> None +def _setup_legacy_sanic() -> None: Sanic.handle_request = _legacy_handle_request Router.get = _legacy_router_get ErrorHandler.lookup = _sentry_error_handler_lookup -async def _startup(self): - # 
type: (Sanic) -> None +async def _startup(self: Sanic) -> None: # This happens about as early in the lifecycle as possible, just after the # Request object is created. The body has not yet been consumed. self.signal("http.lifecycle.request")(_context_enter) @@ -172,8 +161,7 @@ async def _startup(self): await old_startup(self) -async def _context_enter(request): - # type: (Request) -> None +async def _context_enter(request: Request) -> None: request.ctx._sentry_do_integration = ( sentry_sdk.get_client().get_integration(SanicIntegration) is not None ) @@ -182,26 +170,31 @@ async def _context_enter(request): return weak_request = weakref.ref(request) - request.ctx._sentry_scope = sentry_sdk.isolation_scope() - scope = request.ctx._sentry_scope.__enter__() + request.ctx._sentry_scope_manager = sentry_sdk.isolation_scope() + scope = request.ctx._sentry_scope_manager.__enter__() + request.ctx._sentry_scope = scope + + scope.set_transaction_name(request.path, TransactionSource.URL) scope.clear_breadcrumbs() scope.add_event_processor(_make_request_processor(weak_request)) - transaction = continue_trace( - dict(request.headers), + # TODO-neel-potel test if this works + request.ctx._sentry_continue_trace = sentry_sdk.continue_trace( + dict(request.headers) + ) + request.ctx._sentry_continue_trace.__enter__() + request.ctx._sentry_transaction = sentry_sdk.start_span( op=OP.HTTP_SERVER, # Unless the request results in a 404 error, the name and source will get overwritten in _set_transaction name=request.path, source=TransactionSource.URL, origin=SanicIntegration.origin, - ) - request.ctx._sentry_transaction = sentry_sdk.start_transaction( - transaction ).__enter__() -async def _context_exit(request, response=None): - # type: (Request, Optional[BaseHTTPResponse]) -> None +async def _context_exit( + request: Request, response: Optional[BaseHTTPResponse] = None +) -> None: with capture_internal_exceptions(): if not request.ctx._sentry_do_integration: return @@ -211,20 +204,26 @@ async def _context_exit(request, response=None): response_status = None if response is None else response.status # This capture_internal_exceptions block has been intentionally nested here, so that in case an exception - # happens while trying to end the transaction, we still attempt to exit the hub. + # happens while trying to end the transaction, we still attempt to exit the scope. 
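+        # Teardown mirrors _context_enter in reverse order: the transaction
+        # span is closed first, then the continue_trace context, then the
+        # isolation scope manager. A response whose status is in
+        # _unsampled_statuses is dropped by adding an event processor that
+        # returns None.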
with capture_internal_exceptions(): request.ctx._sentry_transaction.set_http_status(response_status) - request.ctx._sentry_transaction.sampled &= ( + + if ( isinstance(integration, SanicIntegration) - and response_status not in integration._unsampled_statuses - ) + and response_status in integration._unsampled_statuses + ): + # drop the event in an event processor + request.ctx._sentry_scope.add_event_processor( + lambda _event, _hint: None + ) + request.ctx._sentry_transaction.__exit__(None, None, None) + request.ctx._sentry_continue_trace.__exit__(None, None, None) - request.ctx._sentry_scope.__exit__(None, None, None) + request.ctx._sentry_scope_manager.__exit__(None, None, None) -async def _set_transaction(request, route, **_): - # type: (Request, Route, **Any) -> None +async def _set_transaction(request: Request, route: Route, **_: Any) -> None: if request.ctx._sentry_do_integration: with capture_internal_exceptions(): scope = sentry_sdk.get_current_scope() @@ -232,8 +231,9 @@ async def _set_transaction(request, route, **_): scope.set_transaction_name(route_name, source=TransactionSource.COMPONENT) -def _sentry_error_handler_lookup(self, exception, *args, **kwargs): - # type: (Any, Exception, *Any, **Any) -> Optional[object] +def _sentry_error_handler_lookup( + self: Any, exception: Exception, *args: Any, **kwargs: Any +) -> Optional[object]: _capture_exception(exception) old_error_handler = old_error_handler_lookup(self, exception, *args, **kwargs) @@ -243,8 +243,9 @@ def _sentry_error_handler_lookup(self, exception, *args, **kwargs): if sentry_sdk.get_client().get_integration(SanicIntegration) is None: return old_error_handler - async def sentry_wrapped_error_handler(request, exception): - # type: (Request, Exception) -> Any + async def sentry_wrapped_error_handler( + request: Request, exception: Exception + ) -> Any: try: response = old_error_handler(request, exception) if isawaitable(response): @@ -266,8 +267,9 @@ async def sentry_wrapped_error_handler(request, exception): return sentry_wrapped_error_handler -async def _legacy_handle_request(self, request, *args, **kwargs): - # type: (Any, Request, *Any, **Any) -> Any +async def _legacy_handle_request( + self: Any, request: Request, *args: Any, **kwargs: Any +) -> Any: if sentry_sdk.get_client().get_integration(SanicIntegration) is None: return await old_handle_request(self, request, *args, **kwargs) @@ -284,8 +286,7 @@ async def _legacy_handle_request(self, request, *args, **kwargs): return response -def _legacy_router_get(self, *args): - # type: (Any, Union[Any, Request]) -> Any +def _legacy_router_get(self: Any, *args: Union[Any, Request]) -> Any: rv = old_router_get(self, *args) if sentry_sdk.get_client().get_integration(SanicIntegration) is not None: with capture_internal_exceptions(): @@ -315,8 +316,7 @@ def _legacy_router_get(self, *args): @ensure_integration_enabled(SanicIntegration) -def _capture_exception(exception): - # type: (Union[ExcInfo, BaseException]) -> None +def _capture_exception(exception: Union[ExcInfo, BaseException]) -> None: with capture_internal_exceptions(): event, hint = event_from_exception( exception, @@ -330,10 +330,8 @@ def _capture_exception(exception): sentry_sdk.capture_event(event, hint=hint) -def _make_request_processor(weak_request): - # type: (Callable[[], Request]) -> EventProcessor - def sanic_processor(event, hint): - # type: (Event, Optional[Hint]) -> Optional[Event] +def _make_request_processor(weak_request: Callable[[], Request]) -> EventProcessor: + def sanic_processor(event: Event, 
hint: Optional[Hint]) -> Optional[Event]: try: if hint and issubclass(hint["exc_info"][0], SanicException): diff --git a/sentry_sdk/integrations/serverless.py b/sentry_sdk/integrations/serverless.py index 760c07ffad..dd8fbe526d 100644 --- a/sentry_sdk/integrations/serverless.py +++ b/sentry_sdk/integrations/serverless.py @@ -1,47 +1,43 @@ +from __future__ import annotations import sys from functools import wraps import sentry_sdk from sentry_sdk.utils import event_from_exception, reraise -from typing import TYPE_CHECKING +from typing import TYPE_CHECKING, overload if TYPE_CHECKING: - from typing import Any + from typing import NoReturn from typing import Callable from typing import TypeVar + from typing import ParamSpec from typing import Union from typing import Optional - from typing import overload - F = TypeVar("F", bound=Callable[..., Any]) + T = TypeVar("T") + P = ParamSpec("P") -else: - def overload(x): - # type: (F) -> F - return x - - -@overload -def serverless_function(f, flush=True): - # type: (F, bool) -> F - pass +if TYPE_CHECKING: + @overload + def serverless_function(f: Callable[P, T], flush: bool = True) -> Callable[P, T]: + pass -@overload -def serverless_function(f=None, flush=True): # noqa: F811 - # type: (None, bool) -> Callable[[F], F] - pass + @overload + def serverless_function( + f: None = None, flush: bool = True + ) -> Callable[[Callable[P, T]], Callable[P, T]]: + pass -def serverless_function(f=None, flush=True): # noqa - # type: (Optional[F], bool) -> Union[F, Callable[[F], F]] - def wrapper(f): - # type: (F) -> F +def serverless_function( + f: Optional[Callable[P, T]] = None, flush: bool = True +) -> Union[Callable[P, T], Callable[[Callable[P, T]], Callable[P, T]]]: + def wrapper(f: Callable[P, T]) -> Callable[P, T]: @wraps(f) - def inner(*args, **kwargs): - # type: (*Any, **Any) -> Any + def inner(*args: P.args, **kwargs: P.kwargs) -> T: with sentry_sdk.isolation_scope() as scope: scope.clear_breadcrumbs() @@ -53,7 +49,7 @@ def inner(*args, **kwargs): if flush: sentry_sdk.flush() - return inner # type: ignore + return inner if f is None: return wrapper @@ -61,8 +57,7 @@ def inner(*args, **kwargs): return wrapper(f) -def _capture_and_reraise(): - # type: () -> None +def _capture_and_reraise() -> NoReturn: exc_info = sys.exc_info() client = sentry_sdk.get_client() if client.is_active(): diff --git a/sentry_sdk/integrations/socket.py b/sentry_sdk/integrations/socket.py index babf61aa7a..2e933310cb 100644 --- a/sentry_sdk/integrations/socket.py +++ b/sentry_sdk/integrations/socket.py @@ -1,3 +1,4 @@ +from __future__ import annotations import socket import sentry_sdk @@ -17,8 +18,7 @@ class SocketIntegration(Integration): origin = f"auto.socket.{identifier}" @staticmethod - def setup_once(): - # type: () -> None + def setup_once() -> None: """ patches two of the most used functions of socket: create_connection and getaddrinfo(dns resolver) """ @@ -26,8 +26,9 @@ def setup_once(): _patch_getaddrinfo() -def _get_span_description(host, port): - # type: (Union[bytes, str, None], Union[bytes, str, int, None]) -> str +def _get_span_description( + host: Union[bytes, str, None], port: Union[bytes, str, int, None] +) -> str: try: host = host.decode() # type: ignore @@ -43,16 +44,14 @@ def _get_span_description(host, port): return description -def _patch_create_connection(): - # type: () -> None +def _patch_create_connection() -> None: real_create_connection = socket.create_connection def create_connection( - address, - timeout=socket._GLOBAL_DEFAULT_TIMEOUT, # type: ignore - 
source_address=None, - ): - # type: (Tuple[Optional[str], int], Optional[float], Optional[Tuple[Union[bytearray, bytes, str], int]])-> socket.socket + address: Tuple[Optional[str], int], + timeout: Optional[float] = socket._GLOBAL_DEFAULT_TIMEOUT, # type: ignore + source_address: Optional[Tuple[Union[bytearray, bytes, str], int]] = None, + ) -> socket.socket: integration = sentry_sdk.get_client().get_integration(SocketIntegration) if integration is None: return real_create_connection(address, timeout, source_address) @@ -61,10 +60,13 @@ def create_connection( op=OP.SOCKET_CONNECTION, name=_get_span_description(address[0], address[1]), origin=SocketIntegration.origin, + only_as_child_span=True, ) as span: - span.set_data("address", address) - span.set_data("timeout", timeout) - span.set_data("source_address", source_address) + host, port = address + span.set_attribute("address.host", host) + span.set_attribute("address.port", port) + span.set_attribute("timeout", timeout) + span.set_attribute("source_address", source_address) return real_create_connection( address=address, timeout=timeout, source_address=source_address @@ -73,12 +75,25 @@ def create_connection( socket.create_connection = create_connection # type: ignore -def _patch_getaddrinfo(): - # type: () -> None +def _patch_getaddrinfo() -> None: real_getaddrinfo = socket.getaddrinfo - def getaddrinfo(host, port, family=0, type=0, proto=0, flags=0): - # type: (Union[bytes, str, None], Union[bytes, str, int, None], int, int, int, int) -> List[Tuple[AddressFamily, SocketKind, int, str, Union[Tuple[str, int], Tuple[str, int, int, int], Tuple[int, bytes]]]] + def getaddrinfo( + host: Union[bytes, str, None], + port: Union[bytes, str, int, None], + family: int = 0, + type: int = 0, + proto: int = 0, + flags: int = 0, + ) -> List[ + Tuple[ + AddressFamily, + SocketKind, + int, + str, + Union[Tuple[str, int], Tuple[str, int, int, int], Tuple[int, bytes]], + ] + ]: integration = sentry_sdk.get_client().get_integration(SocketIntegration) if integration is None: return real_getaddrinfo(host, port, family, type, proto, flags) @@ -87,9 +102,10 @@ def getaddrinfo(host, port, family=0, type=0, proto=0, flags=0): op=OP.SOCKET_DNS, name=_get_span_description(host, port), origin=SocketIntegration.origin, + only_as_child_span=True, ) as span: - span.set_data("host", host) - span.set_data("port", port) + span.set_attribute("host", host) + span.set_attribute("port", port) return real_getaddrinfo(host, port, family, type, proto, flags) diff --git a/sentry_sdk/integrations/spark/__init__.py b/sentry_sdk/integrations/spark/__init__.py index 10d94163c5..d9e8e3fa84 100644 --- a/sentry_sdk/integrations/spark/__init__.py +++ b/sentry_sdk/integrations/spark/__init__.py @@ -1,3 +1,4 @@ +from __future__ import annotations from sentry_sdk.integrations.spark.spark_driver import SparkIntegration from sentry_sdk.integrations.spark.spark_worker import SparkWorkerIntegration diff --git a/sentry_sdk/integrations/spark/spark_driver.py b/sentry_sdk/integrations/spark/spark_driver.py index fac985357f..a35883b60f 100644 --- a/sentry_sdk/integrations/spark/spark_driver.py +++ b/sentry_sdk/integrations/spark/spark_driver.py @@ -1,3 +1,4 @@ +from __future__ import annotations import sentry_sdk from sentry_sdk.integrations import Integration from sentry_sdk.utils import capture_internal_exceptions, ensure_integration_enabled @@ -16,13 +17,11 @@ class SparkIntegration(Integration): identifier = "spark" @staticmethod - def setup_once(): - # type: () -> None + def setup_once() -> 
None: _setup_sentry_tracing() -def _set_app_properties(): - # type: () -> None +def _set_app_properties() -> None: """ Set properties in driver that propagate to worker processes, allowing for workers to have access to those properties. This allows worker integration to have access to app_name and application_id. @@ -41,8 +40,7 @@ def _set_app_properties(): ) -def _start_sentry_listener(sc): - # type: (SparkContext) -> None +def _start_sentry_listener(sc: SparkContext) -> None: """ Start java gateway server to add custom `SparkListener` """ @@ -54,13 +52,11 @@ def _start_sentry_listener(sc): sc._jsc.sc().addSparkListener(listener) -def _add_event_processor(sc): - # type: (SparkContext) -> None +def _add_event_processor(sc: SparkContext) -> None: scope = sentry_sdk.get_isolation_scope() @scope.add_event_processor - def process_event(event, hint): - # type: (Event, Hint) -> Optional[Event] + def process_event(event: Event, hint: Hint) -> Optional[Event]: with capture_internal_exceptions(): if sentry_sdk.get_client().get_integration(SparkIntegration) is None: return event @@ -90,23 +86,22 @@ def process_event(event, hint): return event -def _activate_integration(sc): - # type: (SparkContext) -> None +def _activate_integration(sc: SparkContext) -> None: _start_sentry_listener(sc) _set_app_properties() _add_event_processor(sc) -def _patch_spark_context_init(): - # type: () -> None +def _patch_spark_context_init() -> None: from pyspark import SparkContext spark_context_init = SparkContext._do_init @ensure_integration_enabled(SparkIntegration, spark_context_init) - def _sentry_patched_spark_context_init(self, *args, **kwargs): - # type: (SparkContext, *Any, **Any) -> Optional[Any] + def _sentry_patched_spark_context_init( + self: SparkContext, *args: Any, **kwargs: Any + ) -> Optional[Any]: rv = spark_context_init(self, *args, **kwargs) _activate_integration(self) return rv @@ -114,8 +109,7 @@ def _sentry_patched_spark_context_init(self, *args, **kwargs): SparkContext._do_init = _sentry_patched_spark_context_init -def _setup_sentry_tracing(): - # type: () -> None +def _setup_sentry_tracing() -> None: from pyspark import SparkContext if SparkContext._active_spark_context is not None: @@ -125,102 +119,76 @@ def _setup_sentry_tracing(): class SparkListener: - def onApplicationEnd(self, applicationEnd): # noqa: N802,N803 - # type: (Any) -> None + def onApplicationEnd(self, applicationEnd: Any) -> None: pass - def onApplicationStart(self, applicationStart): # noqa: N802,N803 - # type: (Any) -> None + def onApplicationStart(self, applicationStart: Any) -> None: pass - def onBlockManagerAdded(self, blockManagerAdded): # noqa: N802,N803 - # type: (Any) -> None + def onBlockManagerAdded(self, blockManagerAdded: Any) -> None: pass - def onBlockManagerRemoved(self, blockManagerRemoved): # noqa: N802,N803 - # type: (Any) -> None + def onBlockManagerRemoved(self, blockManagerRemoved: Any) -> None: pass - def onBlockUpdated(self, blockUpdated): # noqa: N802,N803 - # type: (Any) -> None + def onBlockUpdated(self, blockUpdated: Any) -> None: pass - def onEnvironmentUpdate(self, environmentUpdate): # noqa: N802,N803 - # type: (Any) -> None + def onEnvironmentUpdate(self, environmentUpdate: Any) -> None: pass - def onExecutorAdded(self, executorAdded): # noqa: N802,N803 - # type: (Any) -> None + def onExecutorAdded(self, executorAdded: Any) -> None: pass - def onExecutorBlacklisted(self, executorBlacklisted): # noqa: N802,N803 - # type: (Any) -> None + def onExecutorBlacklisted(self, executorBlacklisted: Any) -> 
None: pass - def onExecutorBlacklistedForStage( # noqa: N802 - self, executorBlacklistedForStage # noqa: N803 - ): - # type: (Any) -> None + def onExecutorBlacklistedForStage(self, executorBlacklistedForStage: Any) -> None: pass - def onExecutorMetricsUpdate(self, executorMetricsUpdate): # noqa: N802,N803 - # type: (Any) -> None + def onExecutorMetricsUpdate(self, executorMetricsUpdate: Any) -> None: pass - def onExecutorRemoved(self, executorRemoved): # noqa: N802,N803 - # type: (Any) -> None + def onExecutorRemoved(self, executorRemoved: Any) -> None: pass - def onJobEnd(self, jobEnd): # noqa: N802,N803 - # type: (Any) -> None + def onJobEnd(self, jobEnd: Any) -> None: pass - def onJobStart(self, jobStart): # noqa: N802,N803 - # type: (Any) -> None + def onJobStart(self, jobStart: Any) -> None: pass - def onNodeBlacklisted(self, nodeBlacklisted): # noqa: N802,N803 - # type: (Any) -> None + def onNodeBlacklisted(self, nodeBlacklisted: Any) -> None: pass - def onNodeBlacklistedForStage(self, nodeBlacklistedForStage): # noqa: N802,N803 - # type: (Any) -> None + def onNodeBlacklistedForStage(self, nodeBlacklistedForStage: Any) -> None: pass - def onNodeUnblacklisted(self, nodeUnblacklisted): # noqa: N802,N803 - # type: (Any) -> None + def onNodeUnblacklisted(self, nodeUnblacklisted: Any) -> None: pass - def onOtherEvent(self, event): # noqa: N802,N803 - # type: (Any) -> None + def onOtherEvent(self, event: Any) -> None: pass - def onSpeculativeTaskSubmitted(self, speculativeTask): # noqa: N802,N803 - # type: (Any) -> None + def onSpeculativeTaskSubmitted(self, speculativeTask: Any) -> None: pass - def onStageCompleted(self, stageCompleted): # noqa: N802,N803 - # type: (Any) -> None + def onStageCompleted(self, stageCompleted: Any) -> None: pass - def onStageSubmitted(self, stageSubmitted): # noqa: N802,N803 - # type: (Any) -> None + def onStageSubmitted(self, stageSubmitted: Any) -> None: pass - def onTaskEnd(self, taskEnd): # noqa: N802,N803 - # type: (Any) -> None + def onTaskEnd(self, taskEnd: Any) -> None: pass - def onTaskGettingResult(self, taskGettingResult): # noqa: N802,N803 - # type: (Any) -> None + def onTaskGettingResult(self, taskGettingResult: Any) -> None: pass - def onTaskStart(self, taskStart): # noqa: N802,N803 - # type: (Any) -> None + def onTaskStart(self, taskStart: Any) -> None: pass - def onUnpersistRDD(self, unpersistRDD): # noqa: N802,N803 - # type: (Any) -> None + def onUnpersistRDD(self, unpersistRDD: Any) -> None: pass class Java: @@ -230,25 +198,22 @@ class Java: class SentryListener(SparkListener): def _add_breadcrumb( self, - level, # type: str - message, # type: str - data=None, # type: Optional[dict[str, Any]] - ): - # type: (...) 
-> None + level: str, + message: str, + data: Optional[dict[str, Any]] = None, + ) -> None: sentry_sdk.get_isolation_scope().add_breadcrumb( level=level, message=message, data=data ) - def onJobStart(self, jobStart): # noqa: N802,N803 - # type: (Any) -> None + def onJobStart(self, jobStart: Any) -> None: sentry_sdk.get_isolation_scope().clear_breadcrumbs() message = "Job {} Started".format(jobStart.jobId()) self._add_breadcrumb(level="info", message=message) _set_app_properties() - def onJobEnd(self, jobEnd): # noqa: N802,N803 - # type: (Any) -> None + def onJobEnd(self, jobEnd: Any) -> None: level = "" message = "" data = {"result": jobEnd.jobResult().toString()} @@ -262,8 +227,7 @@ def onJobEnd(self, jobEnd): # noqa: N802,N803 self._add_breadcrumb(level=level, message=message, data=data) - def onStageSubmitted(self, stageSubmitted): # noqa: N802,N803 - # type: (Any) -> None + def onStageSubmitted(self, stageSubmitted: Any) -> None: stage_info = stageSubmitted.stageInfo() message = "Stage {} Submitted".format(stage_info.stageId()) @@ -275,8 +239,7 @@ def onStageSubmitted(self, stageSubmitted): # noqa: N802,N803 self._add_breadcrumb(level="info", message=message, data=data) _set_app_properties() - def onStageCompleted(self, stageCompleted): # noqa: N802,N803 - # type: (Any) -> None + def onStageCompleted(self, stageCompleted: Any) -> None: from py4j.protocol import Py4JJavaError # type: ignore stage_info = stageCompleted.stageInfo() @@ -300,8 +263,7 @@ def onStageCompleted(self, stageCompleted): # noqa: N802,N803 self._add_breadcrumb(level=level, message=message, data=data) -def _get_attempt_id(stage_info): - # type: (Any) -> Optional[int] +def _get_attempt_id(stage_info: Any) -> Optional[int]: try: return stage_info.attemptId() except Exception: diff --git a/sentry_sdk/integrations/spark/spark_worker.py b/sentry_sdk/integrations/spark/spark_worker.py index 5340a0b350..ce42c752f5 100644 --- a/sentry_sdk/integrations/spark/spark_worker.py +++ b/sentry_sdk/integrations/spark/spark_worker.py @@ -1,3 +1,4 @@ +from __future__ import annotations import sys import sentry_sdk @@ -23,15 +24,13 @@ class SparkWorkerIntegration(Integration): identifier = "spark_worker" @staticmethod - def setup_once(): - # type: () -> None + def setup_once() -> None: import pyspark.daemon as original_daemon original_daemon.worker_main = _sentry_worker_main -def _capture_exception(exc_info): - # type: (ExcInfo) -> None +def _capture_exception(exc_info: ExcInfo) -> None: client = sentry_sdk.get_client() mechanism = {"type": "spark", "handled": False} @@ -53,22 +52,20 @@ def _capture_exception(exc_info): if rv: rv.reverse() hint = event_hint_with_exc_info(exc_info) - event = {"level": "error", "exception": {"values": rv}} # type: Event + event: Event = {"level": "error", "exception": {"values": rv}} _tag_task_context() sentry_sdk.capture_event(event, hint=hint) -def _tag_task_context(): - # type: () -> None +def _tag_task_context() -> None: from pyspark.taskcontext import TaskContext scope = sentry_sdk.get_isolation_scope() @scope.add_event_processor - def process_event(event, hint): - # type: (Event, Hint) -> Optional[Event] + def process_event(event: Event, hint: Hint) -> Optional[Event]: with capture_internal_exceptions(): integration = sentry_sdk.get_client().get_integration( SparkWorkerIntegration @@ -103,8 +100,7 @@ def process_event(event, hint): return event -def _sentry_worker_main(*args, **kwargs): - # type: (*Optional[Any], **Optional[Any]) -> None +def _sentry_worker_main(*args: Optional[Any], **kwargs: 
Optional[Any]) -> None: import pyspark.worker as original_worker try: diff --git a/sentry_sdk/integrations/sqlalchemy.py b/sentry_sdk/integrations/sqlalchemy.py index 068d373053..658d10b3ca 100644 --- a/sentry_sdk/integrations/sqlalchemy.py +++ b/sentry_sdk/integrations/sqlalchemy.py @@ -1,3 +1,4 @@ +from __future__ import annotations from sentry_sdk.consts import SPANSTATUS, SPANDATA from sentry_sdk.integrations import _check_minimum_version, Integration, DidNotEnable from sentry_sdk.tracing_utils import add_query_source, record_sql_queries @@ -29,8 +30,7 @@ class SqlalchemyIntegration(Integration): origin = f"auto.db.{identifier}" @staticmethod - def setup_once(): - # type: () -> None + def setup_once() -> None: version = parse_version(SQLALCHEMY_VERSION) _check_minimum_version(SqlalchemyIntegration, version) @@ -41,9 +41,14 @@ def setup_once(): @ensure_integration_enabled(SqlalchemyIntegration) def _before_cursor_execute( - conn, cursor, statement, parameters, context, executemany, *args -): - # type: (Any, Any, Any, Any, Any, bool, *Any) -> None + conn: Any, + cursor: Any, + statement: Any, + parameters: Any, + context: Any, + executemany: bool, + *args: Any, +) -> None: ctx_mgr = record_sql_queries( cursor, statement, @@ -62,29 +67,29 @@ def _before_cursor_execute( @ensure_integration_enabled(SqlalchemyIntegration) -def _after_cursor_execute(conn, cursor, statement, parameters, context, *args): - # type: (Any, Any, Any, Any, Any, *Any) -> None - ctx_mgr = getattr( +def _after_cursor_execute( + conn: Any, cursor: Any, statement: Any, parameters: Any, context: Any, *args: Any +) -> None: + ctx_mgr: Optional[ContextManager[Any]] = getattr( context, "_sentry_sql_span_manager", None - ) # type: Optional[ContextManager[Any]] - - if ctx_mgr is not None: - context._sentry_sql_span_manager = None - ctx_mgr.__exit__(None, None, None) + ) - span = getattr(context, "_sentry_sql_span", None) # type: Optional[Span] + span: Optional[Span] = getattr(context, "_sentry_sql_span", None) if span is not None: with capture_internal_exceptions(): add_query_source(span) + if ctx_mgr is not None: + context._sentry_sql_span_manager = None + ctx_mgr.__exit__(None, None, None) -def _handle_error(context, *args): - # type: (Any, *Any) -> None + +def _handle_error(context: Any, *args: Any) -> None: execution_context = context.execution_context if execution_context is None: return - span = getattr(execution_context, "_sentry_sql_span", None) # type: Optional[Span] + span: Optional[Span] = getattr(execution_context, "_sentry_sql_span", None) if span is not None: span.set_status(SPANSTATUS.INTERNAL_ERROR) @@ -92,9 +97,9 @@ def _handle_error(context, *args): # _after_cursor_execute does not get called for crashing SQL stmts. Judging # from SQLAlchemy codebase it does seem like any error coming into this # handler is going to be fatal. 
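    # Exiting the span manager here finishes the span for the crashed
    # statement, since _after_cursor_execute will not run to close it.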
- ctx_mgr = getattr( + ctx_mgr: Optional[ContextManager[Any]] = getattr( execution_context, "_sentry_sql_span_manager", None - ) # type: Optional[ContextManager[Any]] + ) if ctx_mgr is not None: execution_context._sentry_sql_span_manager = None @@ -102,8 +107,7 @@ def _handle_error(context, *args): # See: https://docs.sqlalchemy.org/en/20/dialects/index.html -def _get_db_system(name): - # type: (str) -> Optional[str] +def _get_db_system(name: str) -> Optional[str]: name = str(name) if "sqlite" in name: @@ -124,23 +128,22 @@ def _get_db_system(name): return None -def _set_db_data(span, conn): - # type: (Span, Any) -> None +def _set_db_data(span: Span, conn: Any) -> None: db_system = _get_db_system(conn.engine.name) if db_system is not None: - span.set_data(SPANDATA.DB_SYSTEM, db_system) + span.set_attribute(SPANDATA.DB_SYSTEM, db_system) if conn.engine.url is None: return db_name = conn.engine.url.database if db_name is not None: - span.set_data(SPANDATA.DB_NAME, db_name) + span.set_attribute(SPANDATA.DB_NAME, db_name) server_address = conn.engine.url.host if server_address is not None: - span.set_data(SPANDATA.SERVER_ADDRESS, server_address) + span.set_attribute(SPANDATA.SERVER_ADDRESS, server_address) server_port = conn.engine.url.port if server_port is not None: - span.set_data(SPANDATA.SERVER_PORT, server_port) + span.set_attribute(SPANDATA.SERVER_PORT, server_port) diff --git a/sentry_sdk/integrations/starlette.py b/sentry_sdk/integrations/starlette.py index d0f0bf2045..2d767ed8b8 100644 --- a/sentry_sdk/integrations/starlette.py +++ b/sentry_sdk/integrations/starlette.py @@ -1,12 +1,12 @@ +from __future__ import annotations import asyncio import functools -import warnings from collections.abc import Set from copy import deepcopy from json import JSONDecodeError import sentry_sdk -from sentry_sdk.consts import OP +from sentry_sdk.consts import OP, SOURCE_FOR_STYLE, TransactionSource from sentry_sdk.integrations import ( DidNotEnable, Integration, @@ -14,16 +14,11 @@ ) from sentry_sdk.integrations._wsgi_common import ( DEFAULT_HTTP_METHODS_TO_CAPTURE, - HttpCodeRangeContainer, _is_json_content_type, request_body_within_bounds, ) from sentry_sdk.integrations.asgi import SentryAsgiMiddleware from sentry_sdk.scope import should_send_default_pii -from sentry_sdk.tracing import ( - SOURCE_FOR_STYLE, - TransactionSource, -) from sentry_sdk.utils import ( AnnotatedValue, capture_internal_exceptions, @@ -37,9 +32,9 @@ from typing import TYPE_CHECKING if TYPE_CHECKING: - from typing import Any, Awaitable, Callable, Container, Dict, Optional, Tuple, Union + from typing import Any, Awaitable, Callable, Dict, Optional, Tuple - from sentry_sdk._types import Event, HttpStatusCodeRange + from sentry_sdk._types import Event try: import starlette # type: ignore @@ -88,12 +83,11 @@ class StarletteIntegration(Integration): def __init__( self, - transaction_style="url", # type: str - failed_request_status_codes=_DEFAULT_FAILED_REQUEST_STATUS_CODES, # type: Union[Set[int], list[HttpStatusCodeRange], None] - middleware_spans=True, # type: bool - http_methods_to_capture=DEFAULT_HTTP_METHODS_TO_CAPTURE, # type: tuple[str, ...] - ): - # type: (...) -> None + transaction_style: str = "url", + failed_request_status_codes: Set[int] = _DEFAULT_FAILED_REQUEST_STATUS_CODES, + middleware_spans: bool = True, + http_methods_to_capture: tuple[str, ...] 
= DEFAULT_HTTP_METHODS_TO_CAPTURE, + ) -> None: if transaction_style not in TRANSACTION_STYLE_VALUES: raise ValueError( "Invalid value for transaction_style: %s (must be in %s)" @@ -103,28 +97,10 @@ def __init__( self.middleware_spans = middleware_spans self.http_methods_to_capture = tuple(map(str.upper, http_methods_to_capture)) - if isinstance(failed_request_status_codes, Set): - self.failed_request_status_codes = ( - failed_request_status_codes - ) # type: Container[int] - else: - warnings.warn( - "Passing a list or None for failed_request_status_codes is deprecated. " - "Please pass a set of int instead.", - DeprecationWarning, - stacklevel=2, - ) - - if failed_request_status_codes is None: - self.failed_request_status_codes = _DEFAULT_FAILED_REQUEST_STATUS_CODES - else: - self.failed_request_status_codes = HttpCodeRangeContainer( - failed_request_status_codes - ) + self.failed_request_status_codes = failed_request_status_codes @staticmethod - def setup_once(): - # type: () -> None + def setup_once() -> None: version = parse_version(STARLETTE_VERSION) if version is None: @@ -140,12 +116,16 @@ def setup_once(): patch_templates() -def _enable_span_for_middleware(middleware_class): - # type: (Any) -> type +def _enable_span_for_middleware(middleware_class: Any) -> type: old_call = middleware_class.__call__ - async def _create_span_call(app, scope, receive, send, **kwargs): - # type: (Any, Dict[str, Any], Callable[[], Awaitable[Dict[str, Any]]], Callable[[Dict[str, Any]], Awaitable[None]], Any) -> None + async def _create_span_call( + app: Any, + scope: Dict[str, Any], + receive: Callable[[], Awaitable[Dict[str, Any]]], + send: Callable[[Dict[str, Any]], Awaitable[None]], + **kwargs: Any, + ) -> None: integration = sentry_sdk.get_client().get_integration(StarletteIntegration) if integration is None or not integration.middleware_spans: return await old_call(app, scope, receive, send, **kwargs) @@ -164,16 +144,17 @@ async def _create_span_call(app, scope, receive, send, **kwargs): op=OP.MIDDLEWARE_STARLETTE, name=middleware_name, origin=StarletteIntegration.origin, + only_as_child_span=True, ) as middleware_span: middleware_span.set_tag("starlette.middleware_name", middleware_name) # Creating spans for the "receive" callback - async def _sentry_receive(*args, **kwargs): - # type: (*Any, **Any) -> Any + async def _sentry_receive(*args: Any, **kwargs: Any) -> Any: with sentry_sdk.start_span( op=OP.MIDDLEWARE_STARLETTE_RECEIVE, name=getattr(receive, "__qualname__", str(receive)), origin=StarletteIntegration.origin, + only_as_child_span=True, ) as span: span.set_tag("starlette.middleware_name", middleware_name) return await receive(*args, **kwargs) @@ -183,12 +164,12 @@ async def _sentry_receive(*args, **kwargs): new_receive = _sentry_receive if not receive_patched else receive # Creating spans for the "send" callback - async def _sentry_send(*args, **kwargs): - # type: (*Any, **Any) -> Any + async def _sentry_send(*args: Any, **kwargs: Any) -> Any: with sentry_sdk.start_span( op=OP.MIDDLEWARE_STARLETTE_SEND, name=getattr(send, "__qualname__", str(send)), origin=StarletteIntegration.origin, + only_as_child_span=True, ) as span: span.set_tag("starlette.middleware_name", middleware_name) return await send(*args, **kwargs) @@ -212,8 +193,7 @@ async def _sentry_send(*args, **kwargs): @ensure_integration_enabled(StarletteIntegration) -def _capture_exception(exception, handled=False): - # type: (BaseException, **Any) -> None +def _capture_exception(exception: BaseException, handled: Any = False) -> 
None: event, hint = event_from_exception( exception, client_options=sentry_sdk.get_client().options, @@ -223,8 +203,7 @@ def _capture_exception(exception, handled=False): sentry_sdk.capture_event(event, hint=hint) -def patch_exception_middleware(middleware_class): - # type: (Any) -> None +def patch_exception_middleware(middleware_class: Any) -> None: """ Capture all exceptions in Starlette app and also extract user information. @@ -235,15 +214,15 @@ def patch_exception_middleware(middleware_class): if not_yet_patched: - def _sentry_middleware_init(self, *args, **kwargs): - # type: (Any, Any, Any) -> None + def _sentry_middleware_init(self: Any, *args: Any, **kwargs: Any) -> None: old_middleware_init(self, *args, **kwargs) # Patch existing exception handlers old_handlers = self._exception_handlers.copy() - async def _sentry_patched_exception_handler(self, *args, **kwargs): - # type: (Any, Any, Any) -> None + async def _sentry_patched_exception_handler( + self: Any, *args: Any, **kwargs: Any + ) -> None: integration = sentry_sdk.get_client().get_integration( StarletteIntegration ) @@ -281,8 +260,12 @@ async def _sentry_patched_exception_handler(self, *args, **kwargs): old_call = middleware_class.__call__ - async def _sentry_exceptionmiddleware_call(self, scope, receive, send): - # type: (Dict[str, Any], Dict[str, Any], Callable[[], Awaitable[Dict[str, Any]]], Callable[[Dict[str, Any]], Awaitable[None]]) -> None + async def _sentry_exceptionmiddleware_call( + self: Dict[str, Any], + scope: Dict[str, Any], + receive: Callable[[], Awaitable[Dict[str, Any]]], + send: Callable[[Dict[str, Any]], Awaitable[None]], + ) -> None: # Also add the user (that was eventually set by be Authentication middle # that was called before this middleware). This is done because the authentication # middleware sets the user in the scope and then (in the same function) @@ -301,8 +284,7 @@ async def _sentry_exceptionmiddleware_call(self, scope, receive, send): @ensure_integration_enabled(StarletteIntegration) -def _add_user_to_sentry_scope(scope): - # type: (Dict[str, Any]) -> None +def _add_user_to_sentry_scope(scope: Dict[str, Any]) -> None: """ Extracts user information from the ASGI scope and adds it to Sentry's scope. @@ -313,7 +295,7 @@ def _add_user_to_sentry_scope(scope): if not should_send_default_pii(): return - user_info = {} # type: Dict[str, Any] + user_info: Dict[str, Any] = {} starlette_user = scope["user"] username = getattr(starlette_user, "username", None) @@ -329,11 +311,10 @@ def _add_user_to_sentry_scope(scope): user_info.setdefault("email", starlette_user.email) sentry_scope = sentry_sdk.get_isolation_scope() - sentry_scope.user = user_info + sentry_scope.set_user(user_info) -def patch_authentication_middleware(middleware_class): - # type: (Any) -> None +def patch_authentication_middleware(middleware_class: Any) -> None: """ Add user information to Sentry scope. 
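The wrapped __call__ runs the original authentication middleware first and
then copies the authenticated user from the ASGI scope into the Sentry scope
via _add_user_to_sentry_scope.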
""" @@ -343,16 +324,19 @@ def patch_authentication_middleware(middleware_class): if not_yet_patched: - async def _sentry_authenticationmiddleware_call(self, scope, receive, send): - # type: (Dict[str, Any], Dict[str, Any], Callable[[], Awaitable[Dict[str, Any]]], Callable[[Dict[str, Any]], Awaitable[None]]) -> None + async def _sentry_authenticationmiddleware_call( + self: Dict[str, Any], + scope: Dict[str, Any], + receive: Callable[[], Awaitable[Dict[str, Any]]], + send: Callable[[Dict[str, Any]], Awaitable[None]], + ) -> None: await old_call(self, scope, receive, send) _add_user_to_sentry_scope(scope) middleware_class.__call__ = _sentry_authenticationmiddleware_call -def patch_middlewares(): - # type: () -> None +def patch_middlewares() -> None: """ Patches Starlettes `Middleware` class to record spans for every middleware invoked. @@ -363,8 +347,9 @@ def patch_middlewares(): if not_yet_patched: - def _sentry_middleware_init(self, cls, *args, **kwargs): - # type: (Any, Any, Any, Any) -> None + def _sentry_middleware_init( + self: Any, cls: Any, *args: Any, **kwargs: Any + ) -> None: if cls == SentryAsgiMiddleware: return old_middleware_init(self, cls, *args, **kwargs) @@ -380,15 +365,15 @@ def _sentry_middleware_init(self, cls, *args, **kwargs): Middleware.__init__ = _sentry_middleware_init -def patch_asgi_app(): - # type: () -> None +def patch_asgi_app() -> None: """ Instrument Starlette ASGI app using the SentryAsgiMiddleware. """ old_app = Starlette.__call__ - async def _sentry_patched_asgi_app(self, scope, receive, send): - # type: (Starlette, StarletteScope, Receive, Send) -> None + async def _sentry_patched_asgi_app( + self: Starlette, scope: StarletteScope, receive: Receive, send: Send + ) -> None: integration = sentry_sdk.get_client().get_integration(StarletteIntegration) if integration is None: return await old_app(self, scope, receive, send) @@ -413,8 +398,7 @@ async def _sentry_patched_asgi_app(self, scope, receive, send): # This was vendored in from Starlette to support Starlette 0.19.1 because # this function was only introduced in 0.20.x -def _is_async_callable(obj): - # type: (Any) -> bool +def _is_async_callable(obj: Any) -> bool: while isinstance(obj, functools.partial): obj = obj.func @@ -423,19 +407,16 @@ def _is_async_callable(obj): ) -def patch_request_response(): - # type: () -> None +def patch_request_response() -> None: old_request_response = starlette.routing.request_response - def _sentry_request_response(func): - # type: (Callable[[Any], Any]) -> ASGIApp + def _sentry_request_response(func: Callable[[Any], Any]) -> ASGIApp: old_func = func is_coroutine = _is_async_callable(old_func) if is_coroutine: - async def _sentry_async_func(*args, **kwargs): - # type: (*Any, **Any) -> Any + async def _sentry_async_func(*args: Any, **kwargs: Any) -> Any: integration = sentry_sdk.get_client().get_integration( StarletteIntegration ) @@ -454,10 +435,10 @@ async def _sentry_async_func(*args, **kwargs): extractor = StarletteRequestExtractor(request) info = await extractor.extract_request_info() - def _make_request_event_processor(req, integration): - # type: (Any, Any) -> Callable[[Event, dict[str, Any]], Event] - def event_processor(event, hint): - # type: (Event, Dict[str, Any]) -> Event + def _make_request_event_processor( + req: Any, integration: Any + ) -> Callable[[Event, dict[str, Any]], Event]: + def event_processor(event: Event, hint: Dict[str, Any]) -> Event: # Add info from request to event request_info = event.get("request", {}) @@ -484,8 +465,7 @@ def 
event_processor(event, hint): else: @functools.wraps(old_func) - def _sentry_sync_func(*args, **kwargs): - # type: (*Any, **Any) -> Any + def _sentry_sync_func(*args: Any, **kwargs: Any) -> Any: integration = sentry_sdk.get_client().get_integration( StarletteIntegration ) @@ -493,8 +473,8 @@ def _sentry_sync_func(*args, **kwargs): return old_func(*args, **kwargs) current_scope = sentry_sdk.get_current_scope() - if current_scope.transaction is not None: - current_scope.transaction.update_active_thread() + if current_scope.root_span is not None: + current_scope.root_span.update_active_thread() sentry_scope = sentry_sdk.get_isolation_scope() if sentry_scope.profile is not None: @@ -509,10 +489,10 @@ def _sentry_sync_func(*args, **kwargs): extractor = StarletteRequestExtractor(request) cookies = extractor.extract_cookies_from_request() - def _make_request_event_processor(req, integration): - # type: (Any, Any) -> Callable[[Event, dict[str, Any]], Event] - def event_processor(event, hint): - # type: (Event, dict[str, Any]) -> Event + def _make_request_event_processor( + req: Any, integration: Any + ) -> Callable[[Event, dict[str, Any]], Event]: + def event_processor(event: Event, hint: dict[str, Any]) -> Event: # Extract information from request request_info = event.get("request", {}) @@ -539,8 +519,7 @@ def event_processor(event, hint): starlette.routing.request_response = _sentry_request_response -def patch_templates(): - # type: () -> None +def patch_templates() -> None: # If markupsafe is not installed, then Jinja2 is not installed # (markupsafe is a dependency of Jinja2) @@ -560,10 +539,10 @@ def patch_templates(): if not_yet_patched: - def _sentry_jinja2templates_init(self, *args, **kwargs): - # type: (Jinja2Templates, *Any, **Any) -> None - def add_sentry_trace_meta(request): - # type: (Request) -> Dict[str, Any] + def _sentry_jinja2templates_init( + self: Jinja2Templates, *args: Any, **kwargs: Any + ) -> None: + def add_sentry_trace_meta(request: Request) -> Dict[str, Any]: trace_meta = Markup( sentry_sdk.get_current_scope().trace_propagation_meta() ) @@ -587,25 +566,26 @@ class StarletteRequestExtractor: (like form data or cookies) and adds it to the Sentry event. 
""" - request = None # type: Request + request: Request = None - def __init__(self, request): - # type: (StarletteRequestExtractor, Request) -> None + def __init__(self: StarletteRequestExtractor, request: Request) -> None: self.request = request - def extract_cookies_from_request(self): - # type: (StarletteRequestExtractor) -> Optional[Dict[str, Any]] - cookies = None # type: Optional[Dict[str, Any]] + def extract_cookies_from_request( + self: StarletteRequestExtractor, + ) -> Optional[Dict[str, Any]]: + cookies: Optional[Dict[str, Any]] = None if should_send_default_pii(): cookies = self.cookies() return cookies - async def extract_request_info(self): - # type: (StarletteRequestExtractor) -> Optional[Dict[str, Any]] + async def extract_request_info( + self: StarletteRequestExtractor, + ) -> Optional[Dict[str, Any]]: client = sentry_sdk.get_client() - request_info = {} # type: Dict[str, Any] + request_info: Dict[str, Any] = {} with capture_internal_exceptions(): # Add cookies @@ -649,19 +629,16 @@ async def extract_request_info(self): request_info["data"] = AnnotatedValue.removed_because_raw_data() return request_info - async def content_length(self): - # type: (StarletteRequestExtractor) -> Optional[int] + async def content_length(self: StarletteRequestExtractor) -> Optional[int]: if "content-length" in self.request.headers: return int(self.request.headers["content-length"]) return None - def cookies(self): - # type: (StarletteRequestExtractor) -> Dict[str, Any] + def cookies(self: StarletteRequestExtractor) -> Dict[str, Any]: return self.request.cookies - async def form(self): - # type: (StarletteRequestExtractor) -> Any + async def form(self: StarletteRequestExtractor) -> Any: if multipart is None: return None @@ -673,12 +650,10 @@ async def form(self): return await self.request.form() - def is_json(self): - # type: (StarletteRequestExtractor) -> bool + def is_json(self: StarletteRequestExtractor) -> bool: return _is_json_content_type(self.request.headers.get("content-type")) - async def json(self): - # type: (StarletteRequestExtractor) -> Optional[Dict[str, Any]] + async def json(self: StarletteRequestExtractor) -> Optional[Dict[str, Any]]: if not self.is_json(): return None try: @@ -687,8 +662,7 @@ async def json(self): return None -def _transaction_name_from_router(scope): - # type: (StarletteScope) -> Optional[str] +def _transaction_name_from_router(scope: StarletteScope) -> Optional[str]: router = scope.get("router") if not router: return None @@ -705,8 +679,9 @@ def _transaction_name_from_router(scope): return None -def _set_transaction_name_and_source(scope, transaction_style, request): - # type: (sentry_sdk.Scope, str, Any) -> None +def _set_transaction_name_and_source( + scope: sentry_sdk.Scope, transaction_style: str, request: Any +) -> None: name = None source = SOURCE_FOR_STYLE[transaction_style] @@ -728,8 +703,9 @@ def _set_transaction_name_and_source(scope, transaction_style, request): ) -def _get_transaction_from_middleware(app, asgi_scope, integration): - # type: (Any, Dict[str, Any], StarletteIntegration) -> Tuple[Optional[str], Optional[str]] +def _get_transaction_from_middleware( + app: Any, asgi_scope: Dict[str, Any], integration: StarletteIntegration +) -> Tuple[Optional[str], Optional[str]]: name = None source = None diff --git a/sentry_sdk/integrations/starlite.py b/sentry_sdk/integrations/starlite.py index 24707a18b1..24ed4b3aa0 100644 --- a/sentry_sdk/integrations/starlite.py +++ b/sentry_sdk/integrations/starlite.py @@ -1,9 +1,9 @@ +from __future__ import 
annotations import sentry_sdk -from sentry_sdk.consts import OP +from sentry_sdk.consts import OP, SOURCE_FOR_STYLE, TransactionSource from sentry_sdk.integrations import DidNotEnable, Integration from sentry_sdk.integrations.asgi import SentryAsgiMiddleware from sentry_sdk.scope import should_send_default_pii -from sentry_sdk.tracing import SOURCE_FOR_STYLE, TransactionSource from sentry_sdk.utils import ( ensure_integration_enabled, event_from_exception, @@ -17,7 +17,7 @@ from starlite.plugins.base import get_plugin_for_value # type: ignore from starlite.routes.http import HTTPRoute # type: ignore from starlite.utils import ConnectionDataExtractor, is_async_callable, Ref # type: ignore - from pydantic import BaseModel # type: ignore + from pydantic import BaseModel except ImportError: raise DidNotEnable("Starlite is not installed") @@ -49,16 +49,16 @@ class StarliteIntegration(Integration): origin = f"auto.http.{identifier}" @staticmethod - def setup_once(): - # type: () -> None + def setup_once() -> None: patch_app_init() patch_middlewares() patch_http_route_handle() class SentryStarliteASGIMiddleware(SentryAsgiMiddleware): - def __init__(self, app, span_origin=StarliteIntegration.origin): - # type: (ASGIApp, str) -> None + def __init__( + self, app: ASGIApp, span_origin: str = StarliteIntegration.origin + ) -> None: super().__init__( app=app, unsafe_context_data=False, @@ -68,8 +68,7 @@ def __init__(self, app, span_origin=StarliteIntegration.origin): ) -def patch_app_init(): - # type: () -> None +def patch_app_init() -> None: """ Replaces the Starlite class's `__init__` function in order to inject `after_exception` handlers and set the `SentryStarliteASGIMiddleware` as the outmost middleware in the stack. @@ -80,8 +79,7 @@ def patch_app_init(): old__init__ = Starlite.__init__ @ensure_integration_enabled(StarliteIntegration, old__init__) - def injection_wrapper(self, *args, **kwargs): - # type: (Starlite, *Any, **Any) -> None + def injection_wrapper(self: Starlite, *args: Any, **kwargs: Any) -> None: after_exception = kwargs.pop("after_exception", []) kwargs.update( after_exception=[ @@ -102,13 +100,11 @@ def injection_wrapper(self, *args, **kwargs): Starlite.__init__ = injection_wrapper -def patch_middlewares(): - # type: () -> None +def patch_middlewares() -> None: old_resolve_middleware_stack = BaseRouteHandler.resolve_middleware @ensure_integration_enabled(StarliteIntegration, old_resolve_middleware_stack) - def resolve_middleware_wrapper(self): - # type: (BaseRouteHandler) -> list[Middleware] + def resolve_middleware_wrapper(self: BaseRouteHandler) -> list[Middleware]: return [ enable_span_for_middleware(middleware) for middleware in old_resolve_middleware_stack(self) @@ -117,8 +113,7 @@ def resolve_middleware_wrapper(self): BaseRouteHandler.resolve_middleware = resolve_middleware_wrapper -def enable_span_for_middleware(middleware): - # type: (Middleware) -> Middleware +def enable_span_for_middleware(middleware: Middleware) -> Middleware: if ( not hasattr(middleware, "__call__") # noqa: B004 or middleware is SentryStarliteASGIMiddleware @@ -126,12 +121,13 @@ def enable_span_for_middleware(middleware): return middleware if isinstance(middleware, DefineMiddleware): - old_call = middleware.middleware.__call__ # type: ASGIApp + old_call: ASGIApp = middleware.middleware.__call__ else: old_call = middleware.__call__ - async def _create_span_call(self, scope, receive, send): - # type: (MiddlewareProtocol, StarliteScope, Receive, Send) -> None + async def _create_span_call( + self: 
MiddlewareProtocol, scope: StarliteScope, receive: Receive, send: Send + ) -> None: if sentry_sdk.get_client().get_integration(StarliteIntegration) is None: return await old_call(self, scope, receive, send) @@ -140,18 +136,21 @@ async def _create_span_call(self, scope, receive, send): op=OP.MIDDLEWARE_STARLITE, name=middleware_name, origin=StarliteIntegration.origin, + only_as_child_span=True, ) as middleware_span: middleware_span.set_tag("starlite.middleware_name", middleware_name) # Creating spans for the "receive" callback - async def _sentry_receive(*args, **kwargs): - # type: (*Any, **Any) -> Union[HTTPReceiveMessage, WebSocketReceiveMessage] + async def _sentry_receive( + *args: Any, **kwargs: Any + ) -> Union[HTTPReceiveMessage, WebSocketReceiveMessage]: if sentry_sdk.get_client().get_integration(StarliteIntegration) is None: return await receive(*args, **kwargs) with sentry_sdk.start_span( op=OP.MIDDLEWARE_STARLITE_RECEIVE, name=getattr(receive, "__qualname__", str(receive)), origin=StarliteIntegration.origin, + only_as_child_span=True, ) as span: span.set_tag("starlite.middleware_name", middleware_name) return await receive(*args, **kwargs) @@ -161,14 +160,14 @@ async def _sentry_receive(*args, **kwargs): new_receive = _sentry_receive if not receive_patched else receive # Creating spans for the "send" callback - async def _sentry_send(message): - # type: (Message) -> None + async def _sentry_send(message: Message) -> None: if sentry_sdk.get_client().get_integration(StarliteIntegration) is None: return await send(message) with sentry_sdk.start_span( op=OP.MIDDLEWARE_STARLITE_SEND, name=getattr(send, "__qualname__", str(send)), origin=StarliteIntegration.origin, + only_as_child_span=True, ) as span: span.set_tag("starlite.middleware_name", middleware_name) return await send(message) @@ -190,19 +189,19 @@ async def _sentry_send(message): return middleware -def patch_http_route_handle(): - # type: () -> None +def patch_http_route_handle() -> None: old_handle = HTTPRoute.handle - async def handle_wrapper(self, scope, receive, send): - # type: (HTTPRoute, HTTPScope, Receive, Send) -> None + async def handle_wrapper( + self: HTTPRoute, scope: HTTPScope, receive: Receive, send: Send + ) -> None: if sentry_sdk.get_client().get_integration(StarliteIntegration) is None: return await old_handle(self, scope, receive, send) sentry_scope = sentry_sdk.get_isolation_scope() - request = scope["app"].request_class( + request: Request[Any, Any] = scope["app"].request_class( scope=scope, receive=receive, send=send - ) # type: Request[Any, Any] + ) extracted_request_data = ConnectionDataExtractor( parse_body=True, parse_query=True )(request) @@ -210,8 +209,7 @@ async def handle_wrapper(self, scope, receive, send): request_data = await body - def event_processor(event, _): - # type: (Event, Hint) -> Event + def event_processor(event: Event, _: Hint) -> Event: route_handler = scope.get("route_handler") request_info = event.get("request", {}) @@ -254,8 +252,7 @@ def event_processor(event, _): HTTPRoute.handle = handle_wrapper -def retrieve_user_from_scope(scope): - # type: (StarliteScope) -> Optional[dict[str, Any]] +def retrieve_user_from_scope(scope: StarliteScope) -> Optional[dict[str, Any]]: scope_user = scope.get("user") if not scope_user: return None @@ -274,9 +271,8 @@ def retrieve_user_from_scope(scope): @ensure_integration_enabled(StarliteIntegration) -def exception_handler(exc, scope, _): - # type: (Exception, StarliteScope, State) -> None - user_info = None # type: Optional[dict[str, Any]] +def 
exception_handler(exc: Exception, scope: StarliteScope, _: State) -> None: + user_info: Optional[dict[str, Any]] = None if should_send_default_pii(): user_info = retrieve_user_from_scope(scope) if user_info and isinstance(user_info, dict): diff --git a/sentry_sdk/integrations/statsig.py b/sentry_sdk/integrations/statsig.py index 1d84eb8aa2..9a62e3d18f 100644 --- a/sentry_sdk/integrations/statsig.py +++ b/sentry_sdk/integrations/statsig.py @@ -1,3 +1,4 @@ +from __future__ import annotations from functools import wraps from typing import Any, TYPE_CHECKING @@ -19,8 +20,7 @@ class StatsigIntegration(Integration): identifier = "statsig" @staticmethod - def setup_once(): - # type: () -> None + def setup_once() -> None: version = parse_version(STATSIG_VERSION) _check_minimum_version(StatsigIntegration, version, "statsig") @@ -28,8 +28,9 @@ def setup_once(): old_check_gate = statsig_module.check_gate @wraps(old_check_gate) - def sentry_check_gate(user, gate, *args, **kwargs): - # type: (StatsigUser, str, *Any, **Any) -> Any + def sentry_check_gate( + user: StatsigUser, gate: str, *args: Any, **kwargs: Any + ) -> Any: enabled = old_check_gate(user, gate, *args, **kwargs) add_feature_flag(gate, enabled) return enabled diff --git a/sentry_sdk/integrations/stdlib.py b/sentry_sdk/integrations/stdlib.py index d388c5bca6..9f9a10da41 100644 --- a/sentry_sdk/integrations/stdlib.py +++ b/sentry_sdk/integrations/stdlib.py @@ -1,3 +1,4 @@ +from __future__ import annotations import os import subprocess import sys @@ -13,10 +14,13 @@ SENSITIVE_DATA_SUBSTITUTE, capture_internal_exceptions, ensure_integration_enabled, + get_current_thread_meta, + http_client_status_to_breadcrumb_level, is_sentry_url, logger, safe_repr, parse_url, + set_thread_info_from_span, ) from typing import TYPE_CHECKING @@ -31,25 +35,23 @@ from sentry_sdk._types import Event, Hint -_RUNTIME_CONTEXT = { +_RUNTIME_CONTEXT: dict[str, object] = { "name": platform.python_implementation(), "version": "%s.%s.%s" % (sys.version_info[:3]), "build": sys.version, -} # type: dict[str, object] +} class StdlibIntegration(Integration): identifier = "stdlib" @staticmethod - def setup_once(): - # type: () -> None + def setup_once() -> None: _install_httplib() _install_subprocess() @add_global_event_processor - def add_python_runtime_context(event, hint): - # type: (Event, Hint) -> Optional[Event] + def add_python_runtime_context(event: Event, hint: Hint) -> Optional[Event]: if sentry_sdk.get_client().get_integration(StdlibIntegration) is not None: contexts = event.setdefault("contexts", {}) if isinstance(contexts, dict) and "runtime" not in contexts: @@ -58,20 +60,20 @@ def add_python_runtime_context(event, hint): return event -def _install_httplib(): - # type: () -> None +def _install_httplib() -> None: real_putrequest = HTTPConnection.putrequest real_getresponse = HTTPConnection.getresponse - def putrequest(self, method, url, *args, **kwargs): - # type: (HTTPConnection, str, str, *Any, **Any) -> Any + def putrequest( + self: HTTPConnection, method: str, url: str, *args: Any, **kwargs: Any + ) -> Any: host = self.host port = self.port default_port = self.default_port client = sentry_sdk.get_client() if client.get_integration(StdlibIntegration) is None or is_sentry_url( - client, host + client, f"{host}:{port}" # noqa: E231 ): return real_putrequest(self, method, url, *args, **kwargs) @@ -93,12 +95,22 @@ def putrequest(self, method, url, *args, **kwargs): name="%s %s" % (method, parsed_url.url if parsed_url else SENSITIVE_DATA_SUBSTITUTE), 
origin="auto.http.stdlib.httplib", + only_as_child_span=True, ) - span.set_data(SPANDATA.HTTP_METHOD, method) + span.__enter__() + + data = { + SPANDATA.HTTP_METHOD: method, + } + set_thread_info_from_span(data, span) + if parsed_url is not None: - span.set_data("url", parsed_url.url) - span.set_data(SPANDATA.HTTP_QUERY, parsed_url.query) - span.set_data(SPANDATA.HTTP_FRAGMENT, parsed_url.fragment) + data["url"] = parsed_url.url + data[SPANDATA.HTTP_QUERY] = parsed_url.query + data[SPANDATA.HTTP_FRAGMENT] = parsed_url.fragment + + for key, value in data.items(): + span.set_attribute(key, value) rv = real_putrequest(self, method, url, *args, **kwargs) @@ -117,11 +129,11 @@ def putrequest(self, method, url, *args, **kwargs): self.putheader(key, value) self._sentrysdk_span = span # type: ignore[attr-defined] + self._sentrysdk_span_data = data # type: ignore[attr-defined] return rv - def getresponse(self, *args, **kwargs): - # type: (HTTPConnection, *Any, **Any) -> Any + def getresponse(self: HTTPConnection, *args: Any, **kwargs: Any) -> Any: span = getattr(self, "_sentrysdk_span", None) if span is None: @@ -130,10 +142,22 @@ def getresponse(self, *args, **kwargs): try: rv = real_getresponse(self, *args, **kwargs) - span.set_http_status(int(rv.status)) - span.set_data("reason", rv.reason) + span_data = getattr(self, "_sentrysdk_span_data", {}) + span_data[SPANDATA.HTTP_STATUS_CODE] = int(rv.status) + span_data["reason"] = rv.reason + + status_code = int(rv.status) + span.set_http_status(status_code) + span.set_attribute("reason", rv.reason) + + sentry_sdk.add_breadcrumb( + type="http", + category="httplib", + data=span_data, + level=http_client_status_to_breadcrumb_level(status_code), + ) finally: - span.finish() + span.__exit__(None, None, None) return rv @@ -141,8 +165,13 @@ def getresponse(self, *args, **kwargs): HTTPConnection.getresponse = getresponse # type: ignore[method-assign] -def _init_argument(args, kwargs, name, position, setdefault_callback=None): - # type: (List[Any], Dict[Any, Any], str, int, Optional[Callable[[Any], Any]]) -> Any +def _init_argument( + args: List[Any], + kwargs: Dict[Any, Any], + name: str, + position: int, + setdefault_callback: Optional[Callable[[Any], Any]] = None, +) -> Any: """ given (*args, **kwargs) of a function call, retrieve (and optionally set a default for) an argument by either name or position. @@ -172,13 +201,13 @@ def _init_argument(args, kwargs, name, position, setdefault_callback=None): return rv -def _install_subprocess(): - # type: () -> None +def _install_subprocess() -> None: old_popen_init = subprocess.Popen.__init__ @ensure_integration_enabled(StdlibIntegration, old_popen_init) - def sentry_patched_popen_init(self, *a, **kw): - # type: (subprocess.Popen[Any], *Any, **Any) -> None + def sentry_patched_popen_init( + self: subprocess.Popen[Any], *a: Any, **kw: Any + ) -> None: # Convert from tuple to list to be able to set values. 
a = list(a) @@ -207,6 +236,7 @@ def sentry_patched_popen_init(self, *a, **kw): op=OP.SUBPROCESS, name=description, origin="auto.subprocess.stdlib.subprocess", + only_as_child_span=True, ) as span: for k, v in sentry_sdk.get_current_scope().iter_trace_propagation_headers( span=span @@ -222,11 +252,29 @@ def sentry_patched_popen_init(self, *a, **kw): env["SUBPROCESS_" + k.upper().replace("-", "_")] = v if cwd: - span.set_data("subprocess.cwd", cwd) + span.set_attribute("subprocess.cwd", cwd) rv = old_popen_init(self, *a, **kw) span.set_tag("subprocess.pid", self.pid) + + with capture_internal_exceptions(): + thread_id, thread_name = get_current_thread_meta() + breadcrumb_data = { + "subprocess.pid": self.pid, + SPANDATA.THREAD_ID: thread_id, + SPANDATA.THREAD_NAME: thread_name, + } + if cwd: + breadcrumb_data["subprocess.cwd"] = cwd + + sentry_sdk.add_breadcrumb( + type="subprocess", + category="subprocess", + message=description, + data=breadcrumb_data, + ) + return rv subprocess.Popen.__init__ = sentry_patched_popen_init # type: ignore @@ -234,11 +282,13 @@ def sentry_patched_popen_init(self, *a, **kw): old_popen_wait = subprocess.Popen.wait @ensure_integration_enabled(StdlibIntegration, old_popen_wait) - def sentry_patched_popen_wait(self, *a, **kw): - # type: (subprocess.Popen[Any], *Any, **Any) -> Any + def sentry_patched_popen_wait( + self: subprocess.Popen[Any], *a: Any, **kw: Any + ) -> Any: with sentry_sdk.start_span( op=OP.SUBPROCESS_WAIT, origin="auto.subprocess.stdlib.subprocess", + only_as_child_span=True, ) as span: span.set_tag("subprocess.pid", self.pid) return old_popen_wait(self, *a, **kw) @@ -248,11 +298,13 @@ def sentry_patched_popen_wait(self, *a, **kw): old_popen_communicate = subprocess.Popen.communicate @ensure_integration_enabled(StdlibIntegration, old_popen_communicate) - def sentry_patched_popen_communicate(self, *a, **kw): - # type: (subprocess.Popen[Any], *Any, **Any) -> Any + def sentry_patched_popen_communicate( + self: subprocess.Popen[Any], *a: Any, **kw: Any + ) -> Any: with sentry_sdk.start_span( op=OP.SUBPROCESS_COMMUNICATE, origin="auto.subprocess.stdlib.subprocess", + only_as_child_span=True, ) as span: span.set_tag("subprocess.pid", self.pid) return old_popen_communicate(self, *a, **kw) @@ -260,6 +312,5 @@ def sentry_patched_popen_communicate(self, *a, **kw): subprocess.Popen.communicate = sentry_patched_popen_communicate # type: ignore -def get_subprocess_traceparent_headers(): - # type: () -> EnvironHeaders +def get_subprocess_traceparent_headers() -> EnvironHeaders: return EnvironHeaders(os.environ, prefix="SUBPROCESS_") diff --git a/sentry_sdk/integrations/strawberry.py b/sentry_sdk/integrations/strawberry.py index ae7d273079..4817d304f7 100644 --- a/sentry_sdk/integrations/strawberry.py +++ b/sentry_sdk/integrations/strawberry.py @@ -1,3 +1,4 @@ +from __future__ import annotations import functools import hashlib from inspect import isawaitable @@ -62,8 +63,7 @@ class StrawberryIntegration(Integration): identifier = "strawberry" origin = f"auto.graphql.{identifier}" - def __init__(self, async_execution=None): - # type: (Optional[bool]) -> None + def __init__(self, async_execution: Optional[bool] = None) -> None: if async_execution not in (None, False, True): raise ValueError( 'Invalid value for async_execution: "{}" (must be bool)'.format( @@ -73,8 +73,7 @@ def __init__(self, async_execution=None): self.async_execution = async_execution @staticmethod - def setup_once(): - # type: () -> None + def setup_once() -> None: version = 
package_version("strawberry-graphql") _check_minimum_version(StrawberryIntegration, version, "strawberry-graphql") @@ -82,13 +81,11 @@ def setup_once(): _patch_views() -def _patch_schema_init(): - # type: () -> None +def _patch_schema_init() -> None: old_schema_init = Schema.__init__ @functools.wraps(old_schema_init) - def _sentry_patched_schema_init(self, *args, **kwargs): - # type: (Schema, Any, Any) -> None + def _sentry_patched_schema_init(self: Schema, *args: Any, **kwargs: Any) -> None: integration = sentry_sdk.get_client().get_integration(StrawberryIntegration) if integration is None: return old_schema_init(self, *args, **kwargs) @@ -107,14 +104,6 @@ def _sentry_patched_schema_init(self, *args, **kwargs): "False" if should_use_async_extension else "True", ) - # remove the built in strawberry sentry extension, if present - extensions = [ - extension - for extension in extensions - if extension - not in (StrawberrySentryAsyncExtension, StrawberrySentrySyncExtension) - ] - # add our extension extensions.append( SentryAsyncExtension if should_use_async_extension else SentrySyncExtension @@ -129,17 +118,15 @@ def _sentry_patched_schema_init(self, *args, **kwargs): class SentryAsyncExtension(SchemaExtension): def __init__( - self, + self: Any, *, - execution_context=None, - ): - # type: (Any, Optional[ExecutionContext]) -> None + execution_context: Optional[ExecutionContext] = None, + ) -> None: if execution_context: self.execution_context = execution_context @cached_property - def _resource_name(self): - # type: () -> str + def _resource_name(self) -> str: query_hash = self.hash_query(self.execution_context.query) # type: ignore if self.execution_context.operation_name: @@ -147,12 +134,10 @@ def _resource_name(self): return query_hash - def hash_query(self, query): - # type: (str) -> str + def hash_query(self, query: str) -> str: return hashlib.md5(query.encode("utf-8")).hexdigest() - def on_operation(self): - # type: () -> Generator[None, None, None] + def on_operation(self) -> Generator[None, None, None]: self._operation_name = self.execution_context.operation_name operation_type = "query" @@ -184,65 +169,66 @@ def on_operation(self): event_processor = _make_request_event_processor(self.execution_context) scope.add_event_processor(event_processor) - span = sentry_sdk.get_current_span() - if span: - self.graphql_span = span.start_child( - op=op, - name=description, - origin=StrawberryIntegration.origin, - ) - else: - self.graphql_span = sentry_sdk.start_span( - op=op, - name=description, - origin=StrawberryIntegration.origin, - ) + with sentry_sdk.start_span( + op=op, + name=description, + origin=StrawberryIntegration.origin, + only_as_child_span=True, + ) as graphql_span: + graphql_span.set_attribute("graphql.operation.type", operation_type) + graphql_span.set_attribute("graphql.document", self.execution_context.query) + graphql_span.set_attribute("graphql.resource_name", self._resource_name) - self.graphql_span.set_data("graphql.operation.type", operation_type) - self.graphql_span.set_data("graphql.operation.name", self._operation_name) - self.graphql_span.set_data("graphql.document", self.execution_context.query) - self.graphql_span.set_data("graphql.resource_name", self._resource_name) + yield - yield + # we might have a more accurate operation_name after the parsing + self._operation_name = self.execution_context.operation_name - transaction = self.graphql_span.containing_transaction - if transaction and self.execution_context.operation_name: - transaction.name = 
self.execution_context.operation_name - transaction.source = TransactionSource.COMPONENT - transaction.op = op + if self._operation_name is not None: + graphql_span.set_attribute( + "graphql.operation.name", self._operation_name + ) - self.graphql_span.finish() + sentry_sdk.get_current_scope().set_transaction_name( + self._operation_name, + source=TransactionSource.COMPONENT, + ) - def on_validate(self): - # type: () -> Generator[None, None, None] - self.validation_span = self.graphql_span.start_child( + root_span = graphql_span.root_span + if root_span: + root_span.op = op + + def on_validate(self) -> Generator[None, None, None]: + with sentry_sdk.start_span( op=OP.GRAPHQL_VALIDATE, name="validation", origin=StrawberryIntegration.origin, - ) + ): + yield - yield - - self.validation_span.finish() - - def on_parse(self): - # type: () -> Generator[None, None, None] - self.parsing_span = self.graphql_span.start_child( + def on_parse(self) -> Generator[None, None, None]: + with sentry_sdk.start_span( op=OP.GRAPHQL_PARSE, name="parsing", origin=StrawberryIntegration.origin, - ) - - yield - - self.parsing_span.finish() + ): + yield - def should_skip_tracing(self, _next, info): - # type: (Callable[[Any, GraphQLResolveInfo, Any, Any], Any], GraphQLResolveInfo) -> bool + def should_skip_tracing( + self, + _next: Callable[[Any, GraphQLResolveInfo, Any, Any], Any], + info: GraphQLResolveInfo, + ) -> bool: return strawberry_should_skip_tracing(_next, info) - async def _resolve(self, _next, root, info, *args, **kwargs): - # type: (Callable[[Any, GraphQLResolveInfo, Any, Any], Any], Any, GraphQLResolveInfo, str, Any) -> Any + async def _resolve( + self, + _next: Callable[[Any, GraphQLResolveInfo, Any, Any], Any], + root: Any, + info: GraphQLResolveInfo, + *args: str, + **kwargs: Any, + ) -> Any: result = _next(root, info, *args, **kwargs) if isawaitable(result): @@ -250,65 +236,79 @@ async def _resolve(self, _next, root, info, *args, **kwargs): return result - async def resolve(self, _next, root, info, *args, **kwargs): - # type: (Callable[[Any, GraphQLResolveInfo, Any, Any], Any], Any, GraphQLResolveInfo, str, Any) -> Any + async def resolve( + self, + _next: Callable[[Any, GraphQLResolveInfo, Any, Any], Any], + root: Any, + info: GraphQLResolveInfo, + *args: str, + **kwargs: Any, + ) -> Any: if self.should_skip_tracing(_next, info): return await self._resolve(_next, root, info, *args, **kwargs) field_path = "{}.{}".format(info.parent_type, info.field_name) - with self.graphql_span.start_child( + with sentry_sdk.start_span( op=OP.GRAPHQL_RESOLVE, name="resolving {}".format(field_path), origin=StrawberryIntegration.origin, ) as span: - span.set_data("graphql.field_name", info.field_name) - span.set_data("graphql.parent_type", info.parent_type.name) - span.set_data("graphql.field_path", field_path) - span.set_data("graphql.path", ".".join(map(str, info.path.as_list()))) + span.set_attribute("graphql.field_name", info.field_name) + span.set_attribute("graphql.parent_type", info.parent_type.name) + span.set_attribute("graphql.field_path", field_path) + span.set_attribute("graphql.path", ".".join(map(str, info.path.as_list()))) return await self._resolve(_next, root, info, *args, **kwargs) class SentrySyncExtension(SentryAsyncExtension): - def resolve(self, _next, root, info, *args, **kwargs): - # type: (Callable[[Any, Any, Any, Any], Any], Any, GraphQLResolveInfo, str, Any) -> Any + def resolve( + self, + _next: Callable[[Any, Any, Any, Any], Any], + root: Any, + info: GraphQLResolveInfo, + *args: str, + 
**kwargs: Any, + ) -> Any: if self.should_skip_tracing(_next, info): return _next(root, info, *args, **kwargs) field_path = "{}.{}".format(info.parent_type, info.field_name) - with self.graphql_span.start_child( + with sentry_sdk.start_span( op=OP.GRAPHQL_RESOLVE, name="resolving {}".format(field_path), origin=StrawberryIntegration.origin, ) as span: - span.set_data("graphql.field_name", info.field_name) - span.set_data("graphql.parent_type", info.parent_type.name) - span.set_data("graphql.field_path", field_path) - span.set_data("graphql.path", ".".join(map(str, info.path.as_list()))) + span.set_attribute("graphql.field_name", info.field_name) + span.set_attribute("graphql.parent_type", info.parent_type.name) + span.set_attribute("graphql.field_path", field_path) + span.set_attribute("graphql.path", ".".join(map(str, info.path.as_list()))) return _next(root, info, *args, **kwargs) -def _patch_views(): - # type: () -> None +def _patch_views() -> None: old_async_view_handle_errors = async_base_view.AsyncBaseHTTPView._handle_errors old_sync_view_handle_errors = sync_base_view.SyncBaseHTTPView._handle_errors - def _sentry_patched_async_view_handle_errors(self, errors, response_data): - # type: (Any, List[GraphQLError], GraphQLHTTPResponse) -> None + def _sentry_patched_async_view_handle_errors( + self: Any, errors: List[GraphQLError], response_data: GraphQLHTTPResponse + ) -> None: old_async_view_handle_errors(self, errors, response_data) _sentry_patched_handle_errors(self, errors, response_data) - def _sentry_patched_sync_view_handle_errors(self, errors, response_data): - # type: (Any, List[GraphQLError], GraphQLHTTPResponse) -> None + def _sentry_patched_sync_view_handle_errors( + self: Any, errors: List[GraphQLError], response_data: GraphQLHTTPResponse + ) -> None: old_sync_view_handle_errors(self, errors, response_data) _sentry_patched_handle_errors(self, errors, response_data) @ensure_integration_enabled(StrawberryIntegration) - def _sentry_patched_handle_errors(self, errors, response_data): - # type: (Any, List[GraphQLError], GraphQLHTTPResponse) -> None + def _sentry_patched_handle_errors( + self: Any, errors: List[GraphQLError], response_data: GraphQLHTTPResponse + ) -> None: if not errors: return @@ -336,18 +336,18 @@ def _sentry_patched_handle_errors(self, errors, response_data): ) -def _make_request_event_processor(execution_context): - # type: (ExecutionContext) -> EventProcessor +def _make_request_event_processor( + execution_context: ExecutionContext, +) -> EventProcessor: - def inner(event, hint): - # type: (Event, dict[str, Any]) -> Event + def inner(event: Event, hint: dict[str, Any]) -> Event: with capture_internal_exceptions(): if should_send_default_pii(): request_data = event.setdefault("request", {}) request_data["api_target"] = "graphql" if not request_data.get("data"): - data = {"query": execution_context.query} # type: dict[str, Any] + data: dict[str, Any] = {"query": execution_context.query} if execution_context.variables: data["variables"] = execution_context.variables if execution_context.operation_name: @@ -366,11 +366,11 @@ def inner(event, hint): return inner -def _make_response_event_processor(response_data): - # type: (GraphQLHTTPResponse) -> EventProcessor +def _make_response_event_processor( + response_data: GraphQLHTTPResponse, +) -> EventProcessor: - def inner(event, hint): - # type: (Event, dict[str, Any]) -> Event + def inner(event: Event, hint: dict[str, Any]) -> Event: with capture_internal_exceptions(): if should_send_default_pii(): contexts = 
event.setdefault("contexts", {}) @@ -381,13 +381,7 @@ def inner(event, hint): return inner -def _guess_if_using_async(extensions): - # type: (List[SchemaExtension]) -> bool - if StrawberrySentryAsyncExtension in extensions: - return True - elif StrawberrySentrySyncExtension in extensions: - return False - +def _guess_if_using_async(extensions: List[SchemaExtension]) -> bool: return bool( {"starlette", "starlite", "litestar", "fastapi"} & set(_get_installed_modules()) ) diff --git a/sentry_sdk/integrations/sys_exit.py b/sentry_sdk/integrations/sys_exit.py index 2341e11359..ff1a97d5b6 100644 --- a/sentry_sdk/integrations/sys_exit.py +++ b/sentry_sdk/integrations/sys_exit.py @@ -1,3 +1,4 @@ +from __future__ import annotations import functools import sys @@ -24,23 +25,19 @@ class SysExitIntegration(Integration): identifier = "sys_exit" - def __init__(self, *, capture_successful_exits=False): - # type: (bool) -> None + def __init__(self, *, capture_successful_exits: bool = False) -> None: self._capture_successful_exits = capture_successful_exits @staticmethod - def setup_once(): - # type: () -> None + def setup_once() -> None: SysExitIntegration._patch_sys_exit() @staticmethod - def _patch_sys_exit(): - # type: () -> None - old_exit = sys.exit # type: Callable[[Union[str, int, None]], NoReturn] + def _patch_sys_exit() -> None: + old_exit: Callable[[Union[str, int, None]], NoReturn] = sys.exit @functools.wraps(old_exit) - def sentry_patched_exit(__status=0): - # type: (Union[str, int, None]) -> NoReturn + def sentry_patched_exit(__status: Union[str, int, None] = 0) -> NoReturn: # @ensure_integration_enabled ensures that this is non-None integration = sentry_sdk.get_client().get_integration(SysExitIntegration) if integration is None: @@ -60,8 +57,7 @@ def sentry_patched_exit(__status=0): sys.exit = sentry_patched_exit -def _capture_exception(exc): - # type: (SystemExit) -> None +def _capture_exception(exc: SystemExit) -> None: event, hint = event_from_exception( exc, client_options=sentry_sdk.get_client().options, diff --git a/sentry_sdk/integrations/threading.py b/sentry_sdk/integrations/threading.py index fc4f539228..12b2e31313 100644 --- a/sentry_sdk/integrations/threading.py +++ b/sentry_sdk/integrations/threading.py @@ -1,15 +1,16 @@ +from __future__ import annotations import sys import warnings from functools import wraps from threading import Thread, current_thread import sentry_sdk +from sentry_sdk import Scope +from sentry_sdk.scope import ScopeType from sentry_sdk.integrations import Integration -from sentry_sdk.scope import use_isolation_scope, use_scope from sentry_sdk.utils import ( event_from_exception, capture_internal_exceptions, - logger, reraise, ) @@ -19,7 +20,6 @@ from typing import Any from typing import TypeVar from typing import Callable - from typing import Optional from sentry_sdk._types import ExcInfo @@ -29,25 +29,11 @@ class ThreadingIntegration(Integration): identifier = "threading" - def __init__(self, propagate_hub=None, propagate_scope=True): - # type: (Optional[bool], bool) -> None - if propagate_hub is not None: - logger.warning( - "Deprecated: propagate_hub is deprecated. This will be removed in the future." 
- ) - - # Note: propagate_hub did not have any effect on propagation of scope data - # scope data was always propagated no matter what the value of propagate_hub was - # This is why the default for propagate_scope is True - + def __init__(self, propagate_scope: bool = True) -> None: self.propagate_scope = propagate_scope - if propagate_hub is not None: - self.propagate_scope = propagate_hub - @staticmethod - def setup_once(): - # type: () -> None + def setup_once() -> None: old_start = Thread.start try: @@ -60,8 +46,7 @@ def setup_once(): channels_version = None @wraps(old_start) - def sentry_start(self, *a, **kw): - # type: (Thread, *Any, **Any) -> Any + def sentry_start(self: Thread, *a: Any, **kw: Any) -> Any: integration = sentry_sdk.get_client().get_integration(ThreadingIntegration) if integration is None: return old_start(self, *a, **kw) @@ -89,8 +74,8 @@ def sentry_start(self, *a, **kw): isolation_scope = sentry_sdk.get_isolation_scope().fork() current_scope = sentry_sdk.get_current_scope().fork() else: - isolation_scope = None - current_scope = None + isolation_scope = Scope(ty=ScopeType.ISOLATION) + current_scope = Scope(ty=ScopeType.CURRENT) # Patching instance methods in `start()` creates a reference cycle if # done in a naive way. See @@ -111,31 +96,28 @@ def sentry_start(self, *a, **kw): Thread.start = sentry_start # type: ignore -def _wrap_run(isolation_scope_to_use, current_scope_to_use, old_run_func): - # type: (Optional[sentry_sdk.Scope], Optional[sentry_sdk.Scope], F) -> F +def _wrap_run( + isolation_scope_to_use: sentry_sdk.Scope, + current_scope_to_use: sentry_sdk.Scope, + old_run_func: F, +) -> F: @wraps(old_run_func) - def run(*a, **kw): - # type: (*Any, **Any) -> Any - def _run_old_run_func(): - # type: () -> Any + def run(*a: Any, **kw: Any) -> Any: + def _run_old_run_func() -> Any: try: self = current_thread() return old_run_func(self, *a[1:], **kw) except Exception: reraise(*_capture_exception()) - if isolation_scope_to_use is not None and current_scope_to_use is not None: - with use_isolation_scope(isolation_scope_to_use): - with use_scope(current_scope_to_use): - return _run_old_run_func() - else: - return _run_old_run_func() + with sentry_sdk.use_isolation_scope(isolation_scope_to_use): + with sentry_sdk.use_scope(current_scope_to_use): + return _run_old_run_func() return run # type: ignore -def _capture_exception(): - # type: () -> ExcInfo +def _capture_exception() -> ExcInfo: exc_info = sys.exc_info() client = sentry_sdk.get_client() diff --git a/sentry_sdk/integrations/tornado.py b/sentry_sdk/integrations/tornado.py index 83fe5e94e8..cb5ceab061 100644 --- a/sentry_sdk/integrations/tornado.py +++ b/sentry_sdk/integrations/tornado.py @@ -1,9 +1,9 @@ +from __future__ import annotations import weakref import contextlib from inspect import iscoroutinefunction import sentry_sdk -from sentry_sdk.api import continue_trace from sentry_sdk.consts import OP from sentry_sdk.scope import should_send_default_pii from sentry_sdk.tracing import TransactionSource @@ -20,13 +20,15 @@ RequestExtractor, _filter_headers, _is_json_content_type, + _request_headers_to_span_attributes, ) from sentry_sdk.integrations.logging import ignore_logger try: from tornado import version_info as TORNADO_VERSION - from tornado.web import RequestHandler, HTTPError from tornado.gen import coroutine + from tornado.httputil import HTTPServerRequest + from tornado.web import RequestHandler, HTTPError except ImportError: raise DidNotEnable("Tornado not installed") @@ -42,13 +44,20 @@ from 
sentry_sdk._types import Event, EventProcessor +REQUEST_PROPERTY_TO_ATTRIBUTE = { + "method": "http.request.method", + "path": "url.path", + "query": "url.query", + "protocol": "url.scheme", +} + + class TornadoIntegration(Integration): identifier = "tornado" origin = f"auto.http.{identifier}" @staticmethod - def setup_once(): - # type: () -> None + def setup_once() -> None: _check_minimum_version(TornadoIntegration, TORNADO_VERSION) if not HAS_REAL_CONTEXTVARS: @@ -68,16 +77,18 @@ def setup_once(): if awaitable: # Starting Tornado 6 RequestHandler._execute method is a standard Python coroutine (async/await) # In that case our method should be a coroutine function too - async def sentry_execute_request_handler(self, *args, **kwargs): - # type: (RequestHandler, *Any, **Any) -> Any + async def sentry_execute_request_handler( + self: RequestHandler, *args: Any, **kwargs: Any + ) -> Any: with _handle_request_impl(self): return await old_execute(self, *args, **kwargs) else: @coroutine # type: ignore - def sentry_execute_request_handler(self, *args, **kwargs): - # type: (RequestHandler, *Any, **Any) -> Any + def sentry_execute_request_handler( + self: RequestHandler, *args: Any, **kwargs: Any + ) -> Any: with _handle_request_impl(self): result = yield from old_execute(self, *args, **kwargs) return result @@ -86,8 +97,14 @@ def sentry_execute_request_handler(self, *args, **kwargs): old_log_exception = RequestHandler.log_exception - def sentry_log_exception(self, ty, value, tb, *args, **kwargs): - # type: (Any, type, BaseException, Any, *Any, **Any) -> Optional[Any] + def sentry_log_exception( + self: Any, + ty: type, + value: BaseException, + tb: Any, + *args: Any, + **kwargs: Any, + ) -> Optional[Any]: _capture_exception(ty, value, tb) return old_log_exception(self, ty, value, tb, *args, **kwargs) @@ -95,8 +112,7 @@ def sentry_log_exception(self, ty, value, tb, *args, **kwargs): @contextlib.contextmanager -def _handle_request_impl(self): - # type: (RequestHandler) -> Generator[None, None, None] +def _handle_request_impl(self: RequestHandler) -> Generator[None, None, None]: integration = sentry_sdk.get_client().get_integration(TornadoIntegration) if integration is None: @@ -111,27 +127,23 @@ def _handle_request_impl(self): processor = _make_event_processor(weak_handler) scope.add_event_processor(processor) - transaction = continue_trace( - headers, - op=OP.HTTP_SERVER, - # Like with all other integrations, this is our - # fallback transaction in case there is no route. - # sentry_urldispatcher_resolve is responsible for - # setting a transaction name later. - name="generic Tornado request", - source=TransactionSource.ROUTE, - origin=TornadoIntegration.origin, - ) - - with sentry_sdk.start_transaction( - transaction, custom_sampling_context={"tornado_request": self.request} - ): - yield + with sentry_sdk.continue_trace(headers): + with sentry_sdk.start_span( + op=OP.HTTP_SERVER, + # Like with all other integrations, this is our + # fallback transaction in case there is no route. + # sentry_urldispatcher_resolve is responsible for + # setting a transaction name later. 
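The hunk being assembled here swaps the old continue_trace() plus start_transaction() pair for the context-manager pattern used throughout this diff. Shown standalone as a sketch; the handler plumbing is hypothetical, while the sentry_sdk calls and constants are the ones added in this hunk.

    import sentry_sdk
    from sentry_sdk.consts import OP
    from sentry_sdk.tracing import TransactionSource

    def handle_request(headers: dict) -> None:
        # continue_trace() reads incoming trace headers; start_span() opens
        # the root span that replaces the old explicit Transaction object.
        with sentry_sdk.continue_trace(headers):
            with sentry_sdk.start_span(
                op=OP.HTTP_SERVER,
                name="generic request",  # fallback; routing renames it later
                source=TransactionSource.ROUTE,
            ):
                ...  # hypothetical: dispatch to the framework's handler here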
+ name="generic Tornado request", + source=TransactionSource.ROUTE, + origin=TornadoIntegration.origin, + attributes=_prepopulate_attributes(self.request), + ): + yield @ensure_integration_enabled(TornadoIntegration) -def _capture_exception(ty, value, tb): - # type: (type, BaseException, Any) -> None +def _capture_exception(ty: type, value: BaseException, tb: Any) -> None: if isinstance(value, HTTPError): return @@ -144,10 +156,8 @@ def _capture_exception(ty, value, tb): sentry_sdk.capture_event(event, hint=hint) -def _make_event_processor(weak_handler): - # type: (Callable[[], RequestHandler]) -> EventProcessor - def tornado_processor(event, hint): - # type: (Event, dict[str, Any]) -> Event +def _make_event_processor(weak_handler: Callable[[], RequestHandler]) -> EventProcessor: + def tornado_processor(event: Event, hint: dict[str, Any]) -> Event: handler = weak_handler() if handler is None: return event @@ -186,35 +196,60 @@ def tornado_processor(event, hint): class TornadoRequestExtractor(RequestExtractor): - def content_length(self): - # type: () -> int + def content_length(self) -> int: if self.request.body is None: return 0 return len(self.request.body) - def cookies(self): - # type: () -> Dict[str, str] + def cookies(self) -> Dict[str, str]: return {k: v.value for k, v in self.request.cookies.items()} - def raw_data(self): - # type: () -> bytes + def raw_data(self) -> bytes: return self.request.body - def form(self): - # type: () -> Dict[str, Any] + def form(self) -> Dict[str, Any]: return { k: [v.decode("latin1", "replace") for v in vs] for k, vs in self.request.body_arguments.items() } - def is_json(self): - # type: () -> bool + def is_json(self) -> bool: return _is_json_content_type(self.request.headers.get("content-type")) - def files(self): - # type: () -> Dict[str, Any] + def files(self) -> Dict[str, Any]: return {k: v[0] for k, v in self.request.files.items() if v} - def size_of_file(self, file): - # type: (Any) -> int + def size_of_file(self, file: Any) -> int: return len(file.body or ()) + + +def _prepopulate_attributes(request: HTTPServerRequest) -> dict[str, Any]: + # https://www.tornadoweb.org/en/stable/httputil.html#tornado.httputil.HTTPServerRequest + attributes = {} + + for prop, attr in REQUEST_PROPERTY_TO_ATTRIBUTE.items(): + if getattr(request, prop, None) is not None: + attributes[attr] = getattr(request, prop) + + if getattr(request, "version", None): + try: + proto, version = request.version.split("/") + attributes["network.protocol.name"] = proto + attributes["network.protocol.version"] = version + except ValueError: + attributes["network.protocol.name"] = request.version + + if getattr(request, "host", None): + try: + address, port = request.host.split(":") + attributes["server.address"] = address + attributes["server.port"] = port + except ValueError: + attributes["server.address"] = request.host + + with capture_internal_exceptions(): + attributes["url.full"] = request.full_url() + + attributes.update(_request_headers_to_span_attributes(request.headers)) + + return attributes diff --git a/sentry_sdk/integrations/trytond.py b/sentry_sdk/integrations/trytond.py index 2c44c593a4..91ed51180c 100644 --- a/sentry_sdk/integrations/trytond.py +++ b/sentry_sdk/integrations/trytond.py @@ -1,8 +1,10 @@ +from __future__ import annotations import sentry_sdk -from sentry_sdk.integrations import Integration +from sentry_sdk.integrations import _check_minimum_version, Integration from sentry_sdk.integrations.wsgi import SentryWsgiMiddleware from sentry_sdk.utils import 
ensure_integration_enabled, event_from_exception +from trytond import __version__ as trytond_version # type: ignore from trytond.exceptions import TrytonException # type: ignore from trytond.wsgi import app # type: ignore @@ -14,18 +16,20 @@ class TrytondWSGIIntegration(Integration): identifier = "trytond_wsgi" origin = f"auto.http.{identifier}" - def __init__(self): # type: () -> None + def __init__(self) -> None: pass @staticmethod - def setup_once(): # type: () -> None + def setup_once() -> None: + _check_minimum_version(TrytondWSGIIntegration, trytond_version) + app.wsgi_app = SentryWsgiMiddleware( app.wsgi_app, span_origin=TrytondWSGIIntegration.origin, ) @ensure_integration_enabled(TrytondWSGIIntegration) - def error_handler(e): # type: (Exception) -> None + def error_handler(e: Exception) -> None: if isinstance(e, TrytonException): return else: diff --git a/sentry_sdk/integrations/typer.py b/sentry_sdk/integrations/typer.py index 8879d6d0d0..ab3a22a6ff 100644 --- a/sentry_sdk/integrations/typer.py +++ b/sentry_sdk/integrations/typer.py @@ -1,3 +1,4 @@ +from __future__ import annotations import sentry_sdk from sentry_sdk.utils import ( capture_internal_exceptions, @@ -30,15 +31,16 @@ class TyperIntegration(Integration): identifier = "typer" @staticmethod - def setup_once(): - # type: () -> None + def setup_once() -> None: typer.main.except_hook = _make_excepthook(typer.main.except_hook) # type: ignore -def _make_excepthook(old_excepthook): - # type: (Excepthook) -> Excepthook - def sentry_sdk_excepthook(type_, value, traceback): - # type: (Type[BaseException], BaseException, Optional[TracebackType]) -> None +def _make_excepthook(old_excepthook: Excepthook) -> Excepthook: + def sentry_sdk_excepthook( + type_: Type[BaseException], + value: BaseException, + traceback: Optional[TracebackType], + ) -> None: integration = sentry_sdk.get_client().get_integration(TyperIntegration) # Note: If we replace this with ensure_integration_enabled then diff --git a/sentry_sdk/integrations/unleash.py b/sentry_sdk/integrations/unleash.py index 6daa0a411f..6dc63cc5a8 100644 --- a/sentry_sdk/integrations/unleash.py +++ b/sentry_sdk/integrations/unleash.py @@ -1,3 +1,4 @@ +from __future__ import annotations from functools import wraps from typing import Any @@ -14,14 +15,14 @@ class UnleashIntegration(Integration): identifier = "unleash" @staticmethod - def setup_once(): - # type: () -> None + def setup_once() -> None: # Wrap and patch evaluation methods (class methods) old_is_enabled = UnleashClient.is_enabled @wraps(old_is_enabled) - def sentry_is_enabled(self, feature, *args, **kwargs): - # type: (UnleashClient, str, *Any, **Any) -> Any + def sentry_is_enabled( + self: UnleashClient, feature: str, *args: Any, **kwargs: Any + ) -> Any: enabled = old_is_enabled(self, feature, *args, **kwargs) # We have no way of knowing what type of unleash feature this is, so we have to treat diff --git a/sentry_sdk/integrations/wsgi.py b/sentry_sdk/integrations/wsgi.py index e628e50e69..37f6946dd4 100644 --- a/sentry_sdk/integrations/wsgi.py +++ b/sentry_sdk/integrations/wsgi.py @@ -1,19 +1,18 @@ +from __future__ import annotations import sys from functools import partial import sentry_sdk from sentry_sdk._werkzeug import get_host, _get_headers -from sentry_sdk.api import continue_trace from sentry_sdk.consts import OP from sentry_sdk.scope import should_send_default_pii from sentry_sdk.integrations._wsgi_common import ( DEFAULT_HTTP_METHODS_TO_CAPTURE, _filter_headers, - nullcontext, + 
_request_headers_to_span_attributes, ) from sentry_sdk.sessions import track_session -from sentry_sdk.scope import use_isolation_scope -from sentry_sdk.tracing import Transaction, TransactionSource +from sentry_sdk.tracing import Span, TransactionSource from sentry_sdk.utils import ( ContextVar, capture_internal_exceptions, @@ -27,35 +26,48 @@ from typing import Callable from typing import Dict from typing import Iterator + from typing import Iterable from typing import Any from typing import Tuple + from typing import List from typing import Optional - from typing import TypeVar from typing import Protocol from sentry_sdk.utils import ExcInfo from sentry_sdk._types import Event, EventProcessor - WsgiResponseIter = TypeVar("WsgiResponseIter") - WsgiResponseHeaders = TypeVar("WsgiResponseHeaders") - WsgiExcInfo = TypeVar("WsgiExcInfo") + WsgiResponseIter = Iterable[bytes] + WsgiResponseHeaders = List[Tuple[str, str]] class StartResponse(Protocol): - def __call__(self, status, response_headers, exc_info=None): # type: ignore - # type: (str, WsgiResponseHeaders, Optional[WsgiExcInfo]) -> WsgiResponseIter + def __call__( + self, + status: str, + response_headers: WsgiResponseHeaders, + exc_info: Optional[ExcInfo] = None, + ) -> WsgiResponseIter: pass _wsgi_middleware_applied = ContextVar("sentry_wsgi_middleware_applied") +DEFAULT_TRANSACTION_NAME = "generic WSGI request" -def wsgi_decoding_dance(s, charset="utf-8", errors="replace"): - # type: (str, str, str) -> str +ENVIRON_TO_ATTRIBUTE = { + "PATH_INFO": "url.path", + "QUERY_STRING": "url.query", + "REQUEST_METHOD": "http.request.method", + "SERVER_NAME": "server.address", + "SERVER_PORT": "server.port", + "wsgi.url_scheme": "url.scheme", +} + + +def wsgi_decoding_dance(s: str, charset: str = "utf-8", errors: str = "replace") -> str: return s.encode("latin1").decode(charset, errors) -def get_request_url(environ, use_x_forwarded_for=False): - # type: (Dict[str, str], bool) -> str +def get_request_url(environ: Dict[str, str], use_x_forwarded_for: bool = False) -> str: """Return the absolute URL without query string for the given WSGI environment.""" script_name = environ.get("SCRIPT_NAME", "").rstrip("/") @@ -79,25 +91,29 @@ class SentryWsgiMiddleware: def __init__( self, - app, # type: Callable[[Dict[str, str], Callable[..., Any]], Any] - use_x_forwarded_for=False, # type: bool - span_origin="manual", # type: str - http_methods_to_capture=DEFAULT_HTTP_METHODS_TO_CAPTURE, # type: Tuple[str, ...] - ): - # type: (...) -> None + app: Callable[[Dict[str, str], Callable[..., Any]], Any], + use_x_forwarded_for: bool = False, + span_origin: Optional[str] = None, + http_methods_to_capture: Tuple[str, ...] 
= DEFAULT_HTTP_METHODS_TO_CAPTURE, + ) -> None: self.app = app self.use_x_forwarded_for = use_x_forwarded_for self.span_origin = span_origin self.http_methods_to_capture = http_methods_to_capture - def __call__(self, environ, start_response): - # type: (Dict[str, str], Callable[..., Any]) -> _ScopedResponse + def __call__( + self, environ: Dict[str, str], start_response: Callable[..., Any] + ) -> _ScopedResponse: if _wsgi_middleware_applied.get(False): return self.app(environ, start_response) _wsgi_middleware_applied.set(True) try: with sentry_sdk.isolation_scope() as scope: + scope.set_transaction_name( + DEFAULT_TRANSACTION_NAME, source=TransactionSource.ROUTE + ) + with track_session(scope, session_mode="request"): with capture_internal_exceptions(): scope.clear_breadcrumbs() @@ -107,53 +123,60 @@ def __call__(self, environ, start_response): environ, self.use_x_forwarded_for ) ) - method = environ.get("REQUEST_METHOD", "").upper() - transaction = None - if method in self.http_methods_to_capture: - transaction = continue_trace( - environ, - op=OP.HTTP_SERVER, - name="generic WSGI request", - source=TransactionSource.ROUTE, - origin=self.span_origin, - ) - - with ( - sentry_sdk.start_transaction( - transaction, - custom_sampling_context={"wsgi_environ": environ}, - ) - if transaction is not None - else nullcontext() - ): - try: - response = self.app( - environ, - partial( - _sentry_start_response, start_response, transaction + should_trace = method in self.http_methods_to_capture + if should_trace: + with sentry_sdk.continue_trace(environ): + with sentry_sdk.start_span( + op=OP.HTTP_SERVER, + name=DEFAULT_TRANSACTION_NAME, + source=TransactionSource.ROUTE, + origin=self.span_origin, + attributes=_prepopulate_attributes( + environ, self.use_x_forwarded_for ), - ) - except BaseException: - reraise(*_capture_exception()) + ) as span: + response = self._run_original_app( + environ, start_response, span + ) + else: + response = self._run_original_app(environ, start_response, None) + finally: _wsgi_middleware_applied.set(False) return _ScopedResponse(scope, response) - -def _sentry_start_response( # type: ignore - old_start_response, # type: StartResponse - transaction, # type: Optional[Transaction] - status, # type: str - response_headers, # type: WsgiResponseHeaders - exc_info=None, # type: Optional[WsgiExcInfo] -): - # type: (...) 
-> WsgiResponseIter + def _run_original_app( + self, + environ: dict[str, str], + start_response: StartResponse, + span: Optional[Span], + ) -> Any: + try: + return self.app( + environ, + partial( + _sentry_start_response, + start_response, + span, + ), + ) + except BaseException: + reraise(*_capture_exception()) + + +def _sentry_start_response( + old_start_response: StartResponse, + span: Optional[Span], + status: str, + response_headers: WsgiResponseHeaders, + exc_info: Optional[ExcInfo] = None, +) -> WsgiResponseIter: with capture_internal_exceptions(): status_int = int(status.split(" ", 1)[0]) - if transaction is not None: - transaction.set_http_status(status_int) + if span is not None: + span.set_http_status(status_int) if exc_info is None: # The Django Rest Framework WSGI test client, and likely other @@ -164,8 +187,7 @@ def _sentry_start_response( # type: ignore return old_start_response(status, response_headers, exc_info) -def _get_environ(environ): - # type: (Dict[str, str]) -> Iterator[Tuple[str, str]] +def _get_environ(environ: Dict[str, str]) -> Iterator[Tuple[str, str]]: """ Returns our explicitly included environment variables we want to capture (server name, port and remote addr if pii is enabled). @@ -181,8 +203,7 @@ def _get_environ(environ): yield key, environ[key] -def get_client_ip(environ): - # type: (Dict[str, str]) -> Optional[Any] +def get_client_ip(environ: Dict[str, str]) -> Optional[Any]: """ Infer the user IP address from various headers. This cannot be used in security-sensitive situations since the value may be forged by a client, @@ -201,8 +222,7 @@ def get_client_ip(environ): return environ.get("REMOTE_ADDR") -def _capture_exception(): - # type: () -> ExcInfo +def _capture_exception() -> ExcInfo: """ Captures the current exception and sends it to Sentry. Returns the ExcInfo tuple so it can be reraised afterwards. @@ -236,17 +256,15 @@ class _ScopedResponse: __slots__ = ("_response", "_scope") - def __init__(self, scope, response): - # type: (sentry_sdk.scope.Scope, Iterator[bytes]) -> None + def __init__(self, scope: sentry_sdk.Scope, response: Iterator[bytes]) -> None: self._scope = scope self._response = response - def __iter__(self): - # type: () -> Iterator[bytes] + def __iter__(self) -> Iterator[bytes]: iterator = iter(self._response) while True: - with use_isolation_scope(self._scope): + with sentry_sdk.use_isolation_scope(self._scope): try: chunk = next(iterator) except StopIteration: @@ -256,9 +274,8 @@ def __iter__(self): yield chunk - def close(self): - # type: () -> None - with use_isolation_scope(self._scope): + def close(self) -> None: + with sentry_sdk.use_isolation_scope(self._scope): try: self._response.close() # type: ignore except AttributeError: @@ -267,8 +284,9 @@ def close(self): reraise(*_capture_exception()) -def _make_wsgi_event_processor(environ, use_x_forwarded_for): - # type: (Dict[str, str], bool) -> EventProcessor +def _make_wsgi_event_processor( + environ: Dict[str, str], use_x_forwarded_for: bool +) -> EventProcessor: # It's a bit unfortunate that we have to extract and parse the request data # from the environ so eagerly, but there are a few good reasons for this.
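[Editor's note] To make the reworked control flow above concrete, here is a minimal usage sketch of the middleware after this change. The app, DSN, and method tuple are illustrative placeholders, not part of the diff; the point is that matching requests now become an `http.server` span via `continue_trace()` / `start_span()` instead of `start_transaction()`.

```python
import sentry_sdk
from sentry_sdk.integrations.wsgi import SentryWsgiMiddleware


def hello_app(environ, start_response):
    # A plain WSGI callable standing in for a real application.
    start_response("200 OK", [("Content-Type", "text/plain")])
    return [b"hello"]


sentry_sdk.init(
    dsn="https://examplePublicKey@o0.ingest.sentry.io/0",  # placeholder DSN
    traces_sample_rate=1.0,
)

# Traced requests are recorded as an `http.server` span named
# "generic WSGI request" (DEFAULT_TRANSACTION_NAME) until a framework
# integration sets a more specific transaction name.
app = SentryWsgiMiddleware(hello_app, http_methods_to_capture=("GET",))
```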
# @@ -288,8 +306,7 @@ def _make_wsgi_event_processor(environ, use_x_forwarded_for): env = dict(_get_environ(environ)) headers = _filter_headers(dict(_get_headers(environ))) - def event_processor(event, hint): - # type: (Event, Dict[str, Any]) -> Event + def event_processor(event: Event, hint: Dict[str, Any]) -> Event: with capture_internal_exceptions(): # if the code below fails halfway through we at least have some data request_info = event.setdefault("request", {}) @@ -308,3 +325,33 @@ def event_processor(event, hint): return event return event_processor + + +def _prepopulate_attributes( + wsgi_environ: dict[str, str], use_x_forwarded_for: bool = False +) -> dict[str, str]: + """Extract span attributes from the WSGI environment.""" + attributes = {} + + for property, attr in ENVIRON_TO_ATTRIBUTE.items(): + if wsgi_environ.get(property) is not None: + attributes[attr] = wsgi_environ[property] + + if wsgi_environ.get("SERVER_PROTOCOL") is not None: + try: + proto, version = wsgi_environ["SERVER_PROTOCOL"].split("/") + attributes["network.protocol.name"] = proto + attributes["network.protocol.version"] = version + except Exception: + attributes["network.protocol.name"] = wsgi_environ["SERVER_PROTOCOL"] + + with capture_internal_exceptions(): + url = get_request_url(wsgi_environ, use_x_forwarded_for) + query = wsgi_environ.get("QUERY_STRING") + attributes["url.full"] = f"{url}?{query}" + + attributes.update( + _request_headers_to_span_attributes(dict(_get_headers(wsgi_environ))) + ) + + return attributes diff --git a/sentry_sdk/logger.py b/sentry_sdk/logger.py index c18cf91ff2..3d5d904312 100644 --- a/sentry_sdk/logger.py +++ b/sentry_sdk/logger.py @@ -1,7 +1,12 @@ # NOTE: this is the logger sentry exposes to users, not some generic logger. +from __future__ import annotations import functools import time -from typing import Any + +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from typing import Any from sentry_sdk import get_client from sentry_sdk.utils import safe_repr @@ -18,13 +23,14 @@ ] -def _capture_log(severity_text, severity_number, template, **kwargs): - # type: (str, int, str, **Any) -> None +def _capture_log( + severity_text: str, severity_number: int, template: str, **kwargs: Any +) -> None: client = get_client() - attrs = { + attrs: dict[str, str | bool | float | int] = { "sentry.message.template": template, - } # type: dict[str, str | bool | float | int] + } if "attributes" in kwargs: attrs.update(kwargs.pop("attributes")) for k, v in kwargs.items(): @@ -65,8 +71,7 @@ def _capture_log(severity_text, severity_number, template, **kwargs): fatal = functools.partial(_capture_log, "fatal", 21) -def _otel_severity_text(otel_severity_number): - # type: (int) -> str +def _otel_severity_text(otel_severity_number: int) -> str: for (lower, upper), severity in OTEL_RANGES: if lower <= otel_severity_number <= upper: return severity @@ -74,8 +79,7 @@ def _otel_severity_text(otel_severity_number): return "default" -def _log_level_to_otel(level, mapping): - # type: (int, dict[Any, int]) -> tuple[int, str] +def _log_level_to_otel(level: int, mapping: dict[Any, int]) -> tuple[int, str]: for py_level, otel_severity_number in sorted(mapping.items(), reverse=True): if level >= py_level: return otel_severity_number, _otel_severity_text(otel_severity_number) diff --git a/sentry_sdk/metrics.py b/sentry_sdk/metrics.py deleted file mode 100644 index 4bdbc62253..0000000000 --- a/sentry_sdk/metrics.py +++ /dev/null @@ -1,965 +0,0 @@ -import io -import os -import random -import re -import sys 
-import threading -import time -import warnings -import zlib -from abc import ABC, abstractmethod -from contextlib import contextmanager -from datetime import datetime, timezone -from functools import wraps, partial - -import sentry_sdk -from sentry_sdk.utils import ( - ContextVar, - now, - nanosecond_time, - to_timestamp, - serialize_frame, - json_dumps, -) -from sentry_sdk.envelope import Envelope, Item -from sentry_sdk.tracing import TransactionSource - -from typing import TYPE_CHECKING - -if TYPE_CHECKING: - from typing import Any - from typing import Callable - from typing import Dict - from typing import Generator - from typing import Iterable - from typing import List - from typing import Optional - from typing import Set - from typing import Tuple - from typing import Union - - from sentry_sdk._types import BucketKey - from sentry_sdk._types import DurationUnit - from sentry_sdk._types import FlushedMetricValue - from sentry_sdk._types import MeasurementUnit - from sentry_sdk._types import MetricMetaKey - from sentry_sdk._types import MetricTagValue - from sentry_sdk._types import MetricTags - from sentry_sdk._types import MetricTagsInternal - from sentry_sdk._types import MetricType - from sentry_sdk._types import MetricValue - - -warnings.warn( - "The sentry_sdk.metrics module is deprecated and will be removed in the next major release. " - "Sentry will reject all metrics sent after October 7, 2024. " - "Learn more: https://sentry.zendesk.com/hc/en-us/articles/26369339769883-Upcoming-API-Changes-to-Metrics", - DeprecationWarning, - stacklevel=2, -) - -_in_metrics = ContextVar("in_metrics", default=False) -_set = set # set is shadowed below - -GOOD_TRANSACTION_SOURCES = frozenset( - [ - TransactionSource.ROUTE, - TransactionSource.VIEW, - TransactionSource.COMPONENT, - TransactionSource.TASK, - ] -) - -_sanitize_unit = partial(re.compile(r"[^a-zA-Z0-9_]+").sub, "") -_sanitize_metric_key = partial(re.compile(r"[^a-zA-Z0-9_\-.]+").sub, "_") -_sanitize_tag_key = partial(re.compile(r"[^a-zA-Z0-9_\-.\/]+").sub, "") - - -def _sanitize_tag_value(value): - # type: (str) -> str - table = str.maketrans( - { - "\n": "\\n", - "\r": "\\r", - "\t": "\\t", - "\\": "\\\\", - "|": "\\u{7c}", - ",": "\\u{2c}", - } - ) - return value.translate(table) - - -def get_code_location(stacklevel): - # type: (int) -> Optional[Dict[str, Any]] - try: - frm = sys._getframe(stacklevel) - except Exception: - return None - - return serialize_frame( - frm, include_local_variables=False, include_source_context=True - ) - - -@contextmanager -def recursion_protection(): - # type: () -> Generator[bool, None, None] - """Enters recursion protection and returns the old flag.""" - old_in_metrics = _in_metrics.get() - _in_metrics.set(True) - try: - yield old_in_metrics - finally: - _in_metrics.set(old_in_metrics) - - -def metrics_noop(func): - # type: (Any) -> Any - """Convenient decorator that uses `recursion_protection` to - make a function a noop. 
- """ - - @wraps(func) - def new_func(*args, **kwargs): - # type: (*Any, **Any) -> Any - with recursion_protection() as in_metrics: - if not in_metrics: - return func(*args, **kwargs) - - return new_func - - -class Metric(ABC): - __slots__ = () - - @abstractmethod - def __init__(self, first): - # type: (MetricValue) -> None - pass - - @property - @abstractmethod - def weight(self): - # type: () -> int - pass - - @abstractmethod - def add(self, value): - # type: (MetricValue) -> None - pass - - @abstractmethod - def serialize_value(self): - # type: () -> Iterable[FlushedMetricValue] - pass - - -class CounterMetric(Metric): - __slots__ = ("value",) - - def __init__( - self, first # type: MetricValue - ): - # type: (...) -> None - self.value = float(first) - - @property - def weight(self): - # type: (...) -> int - return 1 - - def add( - self, value # type: MetricValue - ): - # type: (...) -> None - self.value += float(value) - - def serialize_value(self): - # type: (...) -> Iterable[FlushedMetricValue] - return (self.value,) - - -class GaugeMetric(Metric): - __slots__ = ( - "last", - "min", - "max", - "sum", - "count", - ) - - def __init__( - self, first # type: MetricValue - ): - # type: (...) -> None - first = float(first) - self.last = first - self.min = first - self.max = first - self.sum = first - self.count = 1 - - @property - def weight(self): - # type: (...) -> int - # Number of elements. - return 5 - - def add( - self, value # type: MetricValue - ): - # type: (...) -> None - value = float(value) - self.last = value - self.min = min(self.min, value) - self.max = max(self.max, value) - self.sum += value - self.count += 1 - - def serialize_value(self): - # type: (...) -> Iterable[FlushedMetricValue] - return ( - self.last, - self.min, - self.max, - self.sum, - self.count, - ) - - -class DistributionMetric(Metric): - __slots__ = ("value",) - - def __init__( - self, first # type: MetricValue - ): - # type(...) -> None - self.value = [float(first)] - - @property - def weight(self): - # type: (...) -> int - return len(self.value) - - def add( - self, value # type: MetricValue - ): - # type: (...) -> None - self.value.append(float(value)) - - def serialize_value(self): - # type: (...) -> Iterable[FlushedMetricValue] - return self.value - - -class SetMetric(Metric): - __slots__ = ("value",) - - def __init__( - self, first # type: MetricValue - ): - # type: (...) -> None - self.value = {first} - - @property - def weight(self): - # type: (...) -> int - return len(self.value) - - def add( - self, value # type: MetricValue - ): - # type: (...) -> None - self.value.add(value) - - def serialize_value(self): - # type: (...) -> Iterable[FlushedMetricValue] - def _hash(x): - # type: (MetricValue) -> int - if isinstance(x, str): - return zlib.crc32(x.encode("utf-8")) & 0xFFFFFFFF - return int(x) - - return (_hash(value) for value in self.value) - - -def _encode_metrics(flushable_buckets): - # type: (Iterable[Tuple[int, Dict[BucketKey, Metric]]]) -> bytes - out = io.BytesIO() - _write = out.write - - # Note on sanitization: we intentionally sanitize in emission (serialization) - # and not during aggregation for performance reasons. This means that the - # envelope can in fact have duplicate buckets stored. This is acceptable for - # relay side emission and should not happen commonly. 
- - for timestamp, buckets in flushable_buckets: - for bucket_key, metric in buckets.items(): - metric_type, metric_name, metric_unit, metric_tags = bucket_key - metric_name = _sanitize_metric_key(metric_name) - metric_unit = _sanitize_unit(metric_unit) - _write(metric_name.encode("utf-8")) - _write(b"@") - _write(metric_unit.encode("utf-8")) - - for serialized_value in metric.serialize_value(): - _write(b":") - _write(str(serialized_value).encode("utf-8")) - - _write(b"|") - _write(metric_type.encode("ascii")) - - if metric_tags: - _write(b"|#") - first = True - for tag_key, tag_value in metric_tags: - tag_key = _sanitize_tag_key(tag_key) - if not tag_key: - continue - if first: - first = False - else: - _write(b",") - _write(tag_key.encode("utf-8")) - _write(b":") - _write(_sanitize_tag_value(tag_value).encode("utf-8")) - - _write(b"|T") - _write(str(timestamp).encode("ascii")) - _write(b"\n") - - return out.getvalue() - - -def _encode_locations(timestamp, code_locations): - # type: (int, Iterable[Tuple[MetricMetaKey, Dict[str, Any]]]) -> bytes - mapping = {} # type: Dict[str, List[Any]] - - for key, loc in code_locations: - metric_type, name, unit = key - mri = "{}:{}@{}".format( - metric_type, _sanitize_metric_key(name), _sanitize_unit(unit) - ) - - loc["type"] = "location" - mapping.setdefault(mri, []).append(loc) - - return json_dumps({"timestamp": timestamp, "mapping": mapping}) - - -METRIC_TYPES = { - "c": CounterMetric, - "g": GaugeMetric, - "d": DistributionMetric, - "s": SetMetric, -} # type: dict[MetricType, type[Metric]] - -# some of these are dumb -TIMING_FUNCTIONS = { - "nanosecond": nanosecond_time, - "microsecond": lambda: nanosecond_time() / 1000.0, - "millisecond": lambda: nanosecond_time() / 1000000.0, - "second": now, - "minute": lambda: now() / 60.0, - "hour": lambda: now() / 3600.0, - "day": lambda: now() / 3600.0 / 24.0, - "week": lambda: now() / 3600.0 / 24.0 / 7.0, -} - - -class LocalAggregator: - __slots__ = ("_measurements",) - - def __init__(self): - # type: (...) -> None - self._measurements = ( - {} - ) # type: Dict[Tuple[str, MetricTagsInternal], Tuple[float, float, int, float]] - - def add( - self, - ty, # type: MetricType - key, # type: str - value, # type: float - unit, # type: MeasurementUnit - tags, # type: MetricTagsInternal - ): - # type: (...) -> None - export_key = "%s:%s@%s" % (ty, key, unit) - bucket_key = (export_key, tags) - - old = self._measurements.get(bucket_key) - if old is not None: - v_min, v_max, v_count, v_sum = old - v_min = min(v_min, value) - v_max = max(v_max, value) - v_count += 1 - v_sum += value - else: - v_min = v_max = v_sum = value - v_count = 1 - self._measurements[bucket_key] = (v_min, v_max, v_count, v_sum) - - def to_json(self): - # type: (...) -> Dict[str, Any] - rv = {} # type: Any - for (export_key, tags), ( - v_min, - v_max, - v_count, - v_sum, - ) in self._measurements.items(): - rv.setdefault(export_key, []).append( - { - "tags": _tags_to_dict(tags), - "min": v_min, - "max": v_max, - "count": v_count, - "sum": v_sum, - } - ) - return rv - - -class MetricsAggregator: - ROLLUP_IN_SECONDS = 10.0 - MAX_WEIGHT = 100000 - FLUSHER_SLEEP_TIME = 5.0 - - def __init__( - self, - capture_func, # type: Callable[[Envelope], None] - enable_code_locations=False, # type: bool - ): - # type: (...) 
-> None - self.buckets = {} # type: Dict[int, Any] - self._enable_code_locations = enable_code_locations - self._seen_locations = _set() # type: Set[Tuple[int, MetricMetaKey]] - self._pending_locations = {} # type: Dict[int, List[Tuple[MetricMetaKey, Any]]] - self._buckets_total_weight = 0 - self._capture_func = capture_func - self._running = True - self._lock = threading.Lock() - - self._flush_event = threading.Event() # type: threading.Event - self._force_flush = False - - # The aggregator shifts its flushing by up to an entire rollup window to - # avoid multiple clients trampling on end of a 10 second window as all the - # buckets are anchored to multiples of ROLLUP seconds. We randomize this - # number once per aggregator boot to achieve some level of offsetting - # across a fleet of deployed SDKs. Relay itself will also apply independent - # jittering. - self._flush_shift = random.random() * self.ROLLUP_IN_SECONDS - - self._flusher = None # type: Optional[threading.Thread] - self._flusher_pid = None # type: Optional[int] - - def _ensure_thread(self): - # type: (...) -> bool - """For forking processes we might need to restart this thread. - This ensures that our process actually has that thread running. - """ - if not self._running: - return False - - pid = os.getpid() - if self._flusher_pid == pid: - return True - - with self._lock: - # Recheck to make sure another thread didn't get here and start the - # the flusher in the meantime - if self._flusher_pid == pid: - return True - - self._flusher_pid = pid - - self._flusher = threading.Thread(target=self._flush_loop) - self._flusher.daemon = True - - try: - self._flusher.start() - except RuntimeError: - # Unfortunately at this point the interpreter is in a state that no - # longer allows us to spawn a thread and we have to bail. - self._running = False - return False - - return True - - def _flush_loop(self): - # type: (...) -> None - _in_metrics.set(True) - while self._running or self._force_flush: - if self._running: - self._flush_event.wait(self.FLUSHER_SLEEP_TIME) - self._flush() - - def _flush(self): - # type: (...) -> None - self._emit(self._flushable_buckets(), self._flushable_locations()) - - def _flushable_buckets(self): - # type: (...) -> (Iterable[Tuple[int, Dict[BucketKey, Metric]]]) - with self._lock: - force_flush = self._force_flush - cutoff = time.time() - self.ROLLUP_IN_SECONDS - self._flush_shift - flushable_buckets = () # type: Iterable[Tuple[int, Dict[BucketKey, Metric]]] - weight_to_remove = 0 - - if force_flush: - flushable_buckets = self.buckets.items() - self.buckets = {} - self._buckets_total_weight = 0 - self._force_flush = False - else: - flushable_buckets = [] - for buckets_timestamp, buckets in self.buckets.items(): - # If the timestamp of the bucket is newer that the rollup we want to skip it. - if buckets_timestamp <= cutoff: - flushable_buckets.append((buckets_timestamp, buckets)) - - # We will clear the elements while holding the lock, in order to avoid requesting it downstream again. - for buckets_timestamp, buckets in flushable_buckets: - for metric in buckets.values(): - weight_to_remove += metric.weight - del self.buckets[buckets_timestamp] - - self._buckets_total_weight -= weight_to_remove - - return flushable_buckets - - def _flushable_locations(self): - # type: (...) 
-> Dict[int, List[Tuple[MetricMetaKey, Dict[str, Any]]]] - with self._lock: - locations = self._pending_locations - self._pending_locations = {} - return locations - - @metrics_noop - def add( - self, - ty, # type: MetricType - key, # type: str - value, # type: MetricValue - unit, # type: MeasurementUnit - tags, # type: Optional[MetricTags] - timestamp=None, # type: Optional[Union[float, datetime]] - local_aggregator=None, # type: Optional[LocalAggregator] - stacklevel=0, # type: Optional[int] - ): - # type: (...) -> None - if not self._ensure_thread() or self._flusher is None: - return None - - if timestamp is None: - timestamp = time.time() - elif isinstance(timestamp, datetime): - timestamp = to_timestamp(timestamp) - - bucket_timestamp = int( - (timestamp // self.ROLLUP_IN_SECONDS) * self.ROLLUP_IN_SECONDS - ) - serialized_tags = _serialize_tags(tags) - bucket_key = ( - ty, - key, - unit, - serialized_tags, - ) - - with self._lock: - local_buckets = self.buckets.setdefault(bucket_timestamp, {}) - metric = local_buckets.get(bucket_key) - if metric is not None: - previous_weight = metric.weight - metric.add(value) - else: - metric = local_buckets[bucket_key] = METRIC_TYPES[ty](value) - previous_weight = 0 - - added = metric.weight - previous_weight - - if stacklevel is not None: - self.record_code_location(ty, key, unit, stacklevel + 2, timestamp) - - # Given the new weight we consider whether we want to force flush. - self._consider_force_flush() - - # For sets, we only record that a value has been added to the set but not which one. - # See develop docs: https://develop.sentry.dev/sdk/metrics/#sets - if local_aggregator is not None: - local_value = float(added if ty == "s" else value) - local_aggregator.add(ty, key, local_value, unit, serialized_tags) - - def record_code_location( - self, - ty, # type: MetricType - key, # type: str - unit, # type: MeasurementUnit - stacklevel, # type: int - timestamp=None, # type: Optional[float] - ): - # type: (...) -> None - if not self._enable_code_locations: - return - if timestamp is None: - timestamp = time.time() - meta_key = (ty, key, unit) - start_of_day = datetime.fromtimestamp(timestamp, timezone.utc).replace( - hour=0, minute=0, second=0, microsecond=0, tzinfo=None - ) - start_of_day = int(to_timestamp(start_of_day)) - - if (start_of_day, meta_key) not in self._seen_locations: - self._seen_locations.add((start_of_day, meta_key)) - loc = get_code_location(stacklevel + 3) - if loc is not None: - # Group metadata by day to make flushing more efficient. - # There needs to be one envelope item per timestamp. - self._pending_locations.setdefault(start_of_day, []).append( - (meta_key, loc) - ) - - @metrics_noop - def need_code_location( - self, - ty, # type: MetricType - key, # type: str - unit, # type: MeasurementUnit - timestamp, # type: float - ): - # type: (...) -> bool - if self._enable_code_locations: - return False - meta_key = (ty, key, unit) - start_of_day = datetime.fromtimestamp(timestamp, timezone.utc).replace( - hour=0, minute=0, second=0, microsecond=0, tzinfo=None - ) - start_of_day = int(to_timestamp(start_of_day)) - return (start_of_day, meta_key) not in self._seen_locations - - def kill(self): - # type: (...) -> None - if self._flusher is None: - return - - self._running = False - self._flush_event.set() - self._flusher = None - - @metrics_noop - def flush(self): - # type: (...) -> None - self._force_flush = True - self._flush() - - def _consider_force_flush(self): - # type: (...) 
-> None - # It's important to acquire a lock around this method, since it will touch shared data structures. - total_weight = len(self.buckets) + self._buckets_total_weight - if total_weight >= self.MAX_WEIGHT: - self._force_flush = True - self._flush_event.set() - - def _emit( - self, - flushable_buckets, # type: (Iterable[Tuple[int, Dict[BucketKey, Metric]]]) - code_locations, # type: Dict[int, List[Tuple[MetricMetaKey, Dict[str, Any]]]] - ): - # type: (...) -> Optional[Envelope] - envelope = Envelope() - - if flushable_buckets: - encoded_metrics = _encode_metrics(flushable_buckets) - envelope.add_item(Item(payload=encoded_metrics, type="statsd")) - - for timestamp, locations in code_locations.items(): - encoded_locations = _encode_locations(timestamp, locations) - envelope.add_item(Item(payload=encoded_locations, type="metric_meta")) - - if envelope.items: - self._capture_func(envelope) - return envelope - return None - - -def _serialize_tags( - tags, # type: Optional[MetricTags] -): - # type: (...) -> MetricTagsInternal - if not tags: - return () - - rv = [] - for key, value in tags.items(): - # If the value is a collection, we want to flatten it. - if isinstance(value, (list, tuple)): - for inner_value in value: - if inner_value is not None: - rv.append((key, str(inner_value))) - elif value is not None: - rv.append((key, str(value))) - - # It's very important to sort the tags in order to obtain the - # same bucket key. - return tuple(sorted(rv)) - - -def _tags_to_dict(tags): - # type: (MetricTagsInternal) -> Dict[str, Any] - rv = {} # type: Dict[str, Any] - for tag_name, tag_value in tags: - old_value = rv.get(tag_name) - if old_value is not None: - if isinstance(old_value, list): - old_value.append(tag_value) - else: - rv[tag_name] = [old_value, tag_value] - else: - rv[tag_name] = tag_value - return rv - - -def _get_aggregator(): - # type: () -> Optional[MetricsAggregator] - client = sentry_sdk.get_client() - return ( - client.metrics_aggregator - if client.is_active() and client.metrics_aggregator is not None - else None - ) - - -def _get_aggregator_and_update_tags(key, value, unit, tags): - # type: (str, Optional[MetricValue], MeasurementUnit, Optional[MetricTags]) -> Tuple[Optional[MetricsAggregator], Optional[LocalAggregator], Optional[MetricTags]] - client = sentry_sdk.get_client() - if not client.is_active() or client.metrics_aggregator is None: - return None, None, tags - - updated_tags = dict(tags or ()) # type: Dict[str, MetricTagValue] - updated_tags.setdefault("release", client.options["release"]) - updated_tags.setdefault("environment", client.options["environment"]) - - scope = sentry_sdk.get_current_scope() - local_aggregator = None - - # We go with the low-level API here to access transaction information as - # this one is the same between just errors and errors + performance - transaction_source = scope._transaction_info.get("source") - if transaction_source in GOOD_TRANSACTION_SOURCES: - transaction_name = scope._transaction - if transaction_name: - updated_tags.setdefault("transaction", transaction_name) - if scope._span is not None: - local_aggregator = scope._span._get_local_aggregator() - - experiments = client.options.get("_experiments", {}) - before_emit_callback = experiments.get("before_emit_metric") - if before_emit_callback is not None: - with recursion_protection() as in_metrics: - if not in_metrics: - if not before_emit_callback(key, value, unit, updated_tags): - return None, None, updated_tags - - return client.metrics_aggregator, local_aggregator, 
updated_tags - - -def increment( - key, # type: str - value=1.0, # type: float - unit="none", # type: MeasurementUnit - tags=None, # type: Optional[MetricTags] - timestamp=None, # type: Optional[Union[float, datetime]] - stacklevel=0, # type: int -): - # type: (...) -> None - """Increments a counter.""" - aggregator, local_aggregator, tags = _get_aggregator_and_update_tags( - key, value, unit, tags - ) - if aggregator is not None: - aggregator.add( - "c", key, value, unit, tags, timestamp, local_aggregator, stacklevel - ) - - -# alias as incr is relatively common in python -incr = increment - - -class _Timing: - def __init__( - self, - key, # type: str - tags, # type: Optional[MetricTags] - timestamp, # type: Optional[Union[float, datetime]] - value, # type: Optional[float] - unit, # type: DurationUnit - stacklevel, # type: int - ): - # type: (...) -> None - self.key = key - self.tags = tags - self.timestamp = timestamp - self.value = value - self.unit = unit - self.entered = None # type: Optional[float] - self._span = None # type: Optional[sentry_sdk.tracing.Span] - self.stacklevel = stacklevel - - def _validate_invocation(self, context): - # type: (str) -> None - if self.value is not None: - raise TypeError( - "cannot use timing as %s when a value is provided" % context - ) - - def __enter__(self): - # type: (...) -> _Timing - self.entered = TIMING_FUNCTIONS[self.unit]() - self._validate_invocation("context-manager") - self._span = sentry_sdk.start_span(op="metric.timing", name=self.key) - if self.tags: - for key, value in self.tags.items(): - if isinstance(value, (tuple, list)): - value = ",".join(sorted(map(str, value))) - self._span.set_tag(key, value) - self._span.__enter__() - - # report code locations here for better accuracy - aggregator = _get_aggregator() - if aggregator is not None: - aggregator.record_code_location("d", self.key, self.unit, self.stacklevel) - - return self - - def __exit__(self, exc_type, exc_value, tb): - # type: (Any, Any, Any) -> None - assert self._span, "did not enter" - aggregator, local_aggregator, tags = _get_aggregator_and_update_tags( - self.key, - self.value, - self.unit, - self.tags, - ) - if aggregator is not None: - elapsed = TIMING_FUNCTIONS[self.unit]() - self.entered # type: ignore - aggregator.add( - "d", - self.key, - elapsed, - self.unit, - tags, - self.timestamp, - local_aggregator, - None, # code locations are reported in __enter__ - ) - - self._span.__exit__(exc_type, exc_value, tb) - self._span = None - - def __call__(self, f): - # type: (Any) -> Any - self._validate_invocation("decorator") - - @wraps(f) - def timed_func(*args, **kwargs): - # type: (*Any, **Any) -> Any - with timing( - key=self.key, - tags=self.tags, - timestamp=self.timestamp, - unit=self.unit, - stacklevel=self.stacklevel + 1, - ): - return f(*args, **kwargs) - - return timed_func - - -def timing( - key, # type: str - value=None, # type: Optional[float] - unit="second", # type: DurationUnit - tags=None, # type: Optional[MetricTags] - timestamp=None, # type: Optional[Union[float, datetime]] - stacklevel=0, # type: int -): - # type: (...) -> _Timing - """Emits a distribution with the time it takes to run the given code block. 
- - This method supports three forms of invocation: - - - when a `value` is provided, it functions similar to `distribution` but with - - it can be used as a context manager - - it can be used as a decorator - """ - if value is not None: - aggregator, local_aggregator, tags = _get_aggregator_and_update_tags( - key, value, unit, tags - ) - if aggregator is not None: - aggregator.add( - "d", key, value, unit, tags, timestamp, local_aggregator, stacklevel - ) - return _Timing(key, tags, timestamp, value, unit, stacklevel) - - -def distribution( - key, # type: str - value, # type: float - unit="none", # type: MeasurementUnit - tags=None, # type: Optional[MetricTags] - timestamp=None, # type: Optional[Union[float, datetime]] - stacklevel=0, # type: int -): - # type: (...) -> None - """Emits a distribution.""" - aggregator, local_aggregator, tags = _get_aggregator_and_update_tags( - key, value, unit, tags - ) - if aggregator is not None: - aggregator.add( - "d", key, value, unit, tags, timestamp, local_aggregator, stacklevel - ) - - -def set( - key, # type: str - value, # type: Union[int, str] - unit="none", # type: MeasurementUnit - tags=None, # type: Optional[MetricTags] - timestamp=None, # type: Optional[Union[float, datetime]] - stacklevel=0, # type: int -): - # type: (...) -> None - """Emits a set.""" - aggregator, local_aggregator, tags = _get_aggregator_and_update_tags( - key, value, unit, tags - ) - if aggregator is not None: - aggregator.add( - "s", key, value, unit, tags, timestamp, local_aggregator, stacklevel - ) - - -def gauge( - key, # type: str - value, # type: float - unit="none", # type: MeasurementUnit - tags=None, # type: Optional[MetricTags] - timestamp=None, # type: Optional[Union[float, datetime]] - stacklevel=0, # type: int -): - # type: (...) -> None - """Emits a gauge.""" - aggregator, local_aggregator, tags = _get_aggregator_and_update_tags( - key, value, unit, tags - ) - if aggregator is not None: - aggregator.add( - "g", key, value, unit, tags, timestamp, local_aggregator, stacklevel - ) diff --git a/sentry_sdk/monitor.py b/sentry_sdk/monitor.py index b82a528851..187b198ee4 100644 --- a/sentry_sdk/monitor.py +++ b/sentry_sdk/monitor.py @@ -1,14 +1,15 @@ +from __future__ import annotations import os import time from threading import Thread, Lock -import sentry_sdk from sentry_sdk.utils import logger from typing import TYPE_CHECKING if TYPE_CHECKING: from typing import Optional + from sentry_sdk.transport import Transport MAX_DOWNSAMPLE_FACTOR = 10 @@ -23,21 +24,19 @@ class Monitor: name = "sentry.monitor" - def __init__(self, transport, interval=10): - # type: (sentry_sdk.transport.Transport, float) -> None - self.transport = transport # type: sentry_sdk.transport.Transport - self.interval = interval # type: float + def __init__(self, transport: Transport, interval: float = 10) -> None: + self.transport: Transport = transport + self.interval: float = interval self._healthy = True - self._downsample_factor = 0 # type: int + self._downsample_factor: int = 0 - self._thread = None # type: Optional[Thread] + self._thread: Optional[Thread] = None self._thread_lock = Lock() - self._thread_for_pid = None # type: Optional[int] + self._thread_for_pid: Optional[int] = None self._running = True - def _ensure_running(self): - # type: () -> None + def _ensure_running(self) -> None: """ Check that the monitor has an active thread to run in, or create one if not. 
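[Editor's note] The health check in `Monitor` feeds the backpressure downsampling used by the new sampler later in this diff, which divides the configured rate by `2**downsample_factor` for root spans. A sketch of that arithmetic; the helper function is ours for illustration, not part of the SDK:

```python
MAX_DOWNSAMPLE_FACTOR = 10  # mirrors the constant in sentry_sdk/monitor.py


def effective_sample_rate(configured: float, downsample_factor: int) -> float:
    # Each consecutive unhealthy check raises the factor by one, halving
    # the traced share of traffic per step, capped at 2**10.
    return configured / (2 ** min(downsample_factor, MAX_DOWNSAMPLE_FACTOR))


assert effective_sample_rate(1.0, 0) == 1.0
assert effective_sample_rate(1.0, 3) == 0.125
```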
@@ -52,8 +51,7 @@ def _ensure_running(self): if self._thread_for_pid == os.getpid() and self._thread is not None: return None - def _thread(): - # type: (...) -> None + def _thread() -> None: while self._running: time.sleep(self.interval) if self._running: @@ -74,13 +72,11 @@ def _thread(): return None - def run(self): - # type: () -> None + def run(self) -> None: self.check_health() self.set_downsample_factor() - def set_downsample_factor(self): - # type: () -> None + def set_downsample_factor(self) -> None: if self._healthy: if self._downsample_factor > 0: logger.debug( @@ -95,8 +91,7 @@ def set_downsample_factor(self): self._downsample_factor, ) - def check_health(self): - # type: () -> None + def check_health(self) -> None: """ Perform the actual health checks, currently only checks if the transport is rate-limited. @@ -104,17 +99,14 @@ def check_health(self): """ self._healthy = self.transport.is_healthy() - def is_healthy(self): - # type: () -> bool + def is_healthy(self) -> bool: self._ensure_running() return self._healthy @property - def downsample_factor(self): - # type: () -> int + def downsample_factor(self) -> int: self._ensure_running() return self._downsample_factor - def kill(self): - # type: () -> None + def kill(self) -> None: self._running = False diff --git a/sentry_sdk/opentelemetry/__init__.py b/sentry_sdk/opentelemetry/__init__.py new file mode 100644 index 0000000000..2d057016c1 --- /dev/null +++ b/sentry_sdk/opentelemetry/__init__.py @@ -0,0 +1,9 @@ +from sentry_sdk.opentelemetry.propagator import SentryPropagator +from sentry_sdk.opentelemetry.sampler import SentrySampler +from sentry_sdk.opentelemetry.span_processor import SentrySpanProcessor + +__all__ = [ + "SentryPropagator", + "SentrySampler", + "SentrySpanProcessor", +] diff --git a/sentry_sdk/opentelemetry/consts.py b/sentry_sdk/opentelemetry/consts.py new file mode 100644 index 0000000000..7e2a88ddae --- /dev/null +++ b/sentry_sdk/opentelemetry/consts.py @@ -0,0 +1,40 @@ +from opentelemetry.context import create_key + + +# propagation keys +SENTRY_TRACE_KEY = create_key("sentry-trace") +SENTRY_BAGGAGE_KEY = create_key("sentry-baggage") + +# scope management keys +SENTRY_SCOPES_KEY = create_key("sentry_scopes") +SENTRY_FORK_ISOLATION_SCOPE_KEY = create_key("sentry_fork_isolation_scope") +SENTRY_USE_CURRENT_SCOPE_KEY = create_key("sentry_use_current_scope") +SENTRY_USE_ISOLATION_SCOPE_KEY = create_key("sentry_use_isolation_scope") + +# trace state keys +SENTRY_PREFIX = "sentry-" +TRACESTATE_SAMPLED_KEY = SENTRY_PREFIX + "sampled" +TRACESTATE_SAMPLE_RATE_KEY = SENTRY_PREFIX + "sample_rate" +TRACESTATE_SAMPLE_RAND_KEY = SENTRY_PREFIX + "sample_rand" + +# misc +OTEL_SENTRY_CONTEXT = "otel" +SPAN_ORIGIN = "auto.otel" + +# resource semconv attributes +# Not all of these are stable yet, so defining them here rather than importing. 
+# https://github.com/open-telemetry/semantic-conventions/blob/main/docs/resource/README.md#service +RESOURCE_SERVICE_NAME = "service.name" +RESOURCE_SERVICE_NAMESPACE = "service.namespace" +RESOURCE_SERVICE_VERSION = "service.version" + + +class SentrySpanAttribute: + DESCRIPTION = "sentry.description" + OP = "sentry.op" + ORIGIN = "sentry.origin" + TAG = "sentry.tag" + NAME = "sentry.name" + SOURCE = "sentry.source" + CONTEXT = "sentry.context" + CUSTOM_SAMPLED = "sentry.custom_sampled" # used for saving start_span(sampled=X) diff --git a/sentry_sdk/opentelemetry/contextvars_context.py b/sentry_sdk/opentelemetry/contextvars_context.py new file mode 100644 index 0000000000..34d7866f3c --- /dev/null +++ b/sentry_sdk/opentelemetry/contextvars_context.py @@ -0,0 +1,83 @@ +from __future__ import annotations +from typing import TYPE_CHECKING + +from opentelemetry.trace import get_current_span, set_span_in_context +from opentelemetry.trace.span import INVALID_SPAN +from opentelemetry.context import Context, get_value, set_value +from opentelemetry.context.contextvars_context import ContextVarsRuntimeContext + +import sentry_sdk +from sentry_sdk.tracing import Span +from sentry_sdk.opentelemetry.consts import ( + SENTRY_SCOPES_KEY, + SENTRY_FORK_ISOLATION_SCOPE_KEY, + SENTRY_USE_CURRENT_SCOPE_KEY, + SENTRY_USE_ISOLATION_SCOPE_KEY, +) +from sentry_sdk.opentelemetry.scope import PotelScope, validate_scopes + +if TYPE_CHECKING: + from contextvars import Token + + +class SentryContextVarsRuntimeContext(ContextVarsRuntimeContext): + def attach(self, context: Context) -> Token[Context]: + scopes = validate_scopes(get_value(SENTRY_SCOPES_KEY, context)) + + should_fork_isolation_scope = bool( + context.pop(SENTRY_FORK_ISOLATION_SCOPE_KEY, False) + ) + + should_use_isolation_scope = context.pop(SENTRY_USE_ISOLATION_SCOPE_KEY, None) + should_use_isolation_scope = ( + should_use_isolation_scope + if isinstance(should_use_isolation_scope, PotelScope) + else None + ) + + should_use_current_scope = context.pop(SENTRY_USE_CURRENT_SCOPE_KEY, None) + should_use_current_scope = ( + should_use_current_scope + if isinstance(should_use_current_scope, PotelScope) + else None + ) + + if scopes: + current_scope = scopes[0] + isolation_scope = scopes[1] + else: + current_scope = sentry_sdk.get_current_scope() + isolation_scope = sentry_sdk.get_isolation_scope() + + new_context = context + + if should_use_current_scope: + new_scope = should_use_current_scope + + # the main case where we use use_scope is for + # scope propagation in the ThreadingIntegration + # so we need to carry forward the span reference explicitly too + span = should_use_current_scope.span + if span: + new_context = set_span_in_context(span._otel_span, new_context) + + else: + new_scope = current_scope.fork() + + # carry forward a wrapped span reference since the otel context is always the + # source of truth for the active span + current_span = get_current_span(context) + if current_span != INVALID_SPAN: + new_scope._span = Span(otel_span=get_current_span(context)) + + if should_use_isolation_scope: + new_isolation_scope = should_use_isolation_scope + elif should_fork_isolation_scope: + new_isolation_scope = isolation_scope.fork() + else: + new_isolation_scope = isolation_scope + + new_scopes = (new_scope, new_isolation_scope) + + new_context = set_value(SENTRY_SCOPES_KEY, new_scopes, new_context) + return super().attach(new_context) diff --git a/sentry_sdk/integrations/opentelemetry/propagator.py b/sentry_sdk/opentelemetry/propagator.py similarity 
index 61% rename from sentry_sdk/integrations/opentelemetry/propagator.py rename to sentry_sdk/opentelemetry/propagator.py index b84d582d6e..f76dcc3906 100644 --- a/sentry_sdk/integrations/opentelemetry/propagator.py +++ b/sentry_sdk/opentelemetry/propagator.py @@ -1,7 +1,10 @@ +from __future__ import annotations + from opentelemetry import trace from opentelemetry.context import ( Context, get_current, + get_value, set_value, ) from opentelemetry.propagators.textmap import ( @@ -17,19 +20,24 @@ SpanContext, TraceFlags, ) +from opentelemetry.semconv.trace import SpanAttributes -from sentry_sdk.integrations.opentelemetry.consts import ( +import sentry_sdk +from sentry_sdk.consts import ( + BAGGAGE_HEADER_NAME, + SENTRY_TRACE_HEADER_NAME, +) +from sentry_sdk.opentelemetry.consts import ( SENTRY_BAGGAGE_KEY, SENTRY_TRACE_KEY, + SENTRY_SCOPES_KEY, ) -from sentry_sdk.integrations.opentelemetry.span_processor import ( - SentrySpanProcessor, -) -from sentry_sdk.tracing import ( - BAGGAGE_HEADER_NAME, - SENTRY_TRACE_HEADER_NAME, +from sentry_sdk.tracing_utils import ( + Baggage, + extract_sentrytrace_data, + should_propagate_trace, ) -from sentry_sdk.tracing_utils import Baggage, extract_sentrytrace_data +from sentry_sdk.opentelemetry.scope import validate_scopes from typing import TYPE_CHECKING @@ -42,11 +50,16 @@ class SentryPropagator(TextMapPropagator): Propagates tracing headers for Sentry's tracing system in a way OTel understands. """ - def extract(self, carrier, context=None, getter=default_getter): - # type: (CarrierT, Optional[Context], Getter[CarrierT]) -> Context + def extract( + self, + carrier: CarrierT, + context: Optional[Context] = None, + getter: Getter[CarrierT] = default_getter, + ) -> Context: if context is None: context = get_current() + # TODO-neel-potel cleanup with continue_trace / isolation_scope sentry_trace = getter.get(carrier, SENTRY_TRACE_HEADER_NAME) if not sentry_trace: return context @@ -84,34 +97,29 @@ def extract(self, carrier, context=None, getter=default_getter): modified_context = trace.set_span_in_context(span, context) return modified_context - def inject(self, carrier, context=None, setter=default_setter): - # type: (CarrierT, Optional[Context], Setter[CarrierT]) -> None - if context is None: - context = get_current() - - current_span = trace.get_current_span(context) - current_span_context = current_span.get_span_context() - - if not current_span_context.is_valid: + def inject( + self, + carrier: CarrierT, + context: Optional[Context] = None, + setter: Setter[CarrierT] = default_setter, + ) -> None: + scopes = validate_scopes(get_value(SENTRY_SCOPES_KEY, context)) + if not scopes: return - span_id = trace.format_span_id(current_span_context.span_id) - - span_map = SentrySpanProcessor().otel_span_map - sentry_span = span_map.get(span_id, None) - if not sentry_span: - return + (current_scope, _) = scopes - setter.set(carrier, SENTRY_TRACE_HEADER_NAME, sentry_span.to_traceparent()) + span = current_scope.span + if span: + span_url = span.get_attribute(SpanAttributes.HTTP_URL) + if span_url and not should_propagate_trace( + sentry_sdk.get_client(), span_url + ): + return - if sentry_span.containing_transaction: - baggage = sentry_span.containing_transaction.get_baggage() - if baggage: - baggage_data = baggage.serialize() - if baggage_data: - setter.set(carrier, BAGGAGE_HEADER_NAME, baggage_data) + for key, value in current_scope.iter_trace_propagation_headers(): + setter.set(carrier, key, value) @property - def fields(self): - # type: () -> Set[str] + def 
fields(self) -> Set[str]: return {SENTRY_TRACE_HEADER_NAME, BAGGAGE_HEADER_NAME} diff --git a/sentry_sdk/opentelemetry/sampler.py b/sentry_sdk/opentelemetry/sampler.py new file mode 100644 index 0000000000..878b856f5a --- /dev/null +++ b/sentry_sdk/opentelemetry/sampler.py @@ -0,0 +1,343 @@ +from __future__ import annotations +from decimal import Decimal + +from opentelemetry import trace +from opentelemetry.sdk.trace.sampling import Sampler, SamplingResult, Decision +from opentelemetry.trace.span import TraceState + +import sentry_sdk +from sentry_sdk.opentelemetry.consts import ( + TRACESTATE_SAMPLED_KEY, + TRACESTATE_SAMPLE_RAND_KEY, + TRACESTATE_SAMPLE_RATE_KEY, + SentrySpanAttribute, +) +from sentry_sdk.tracing_utils import ( + _generate_sample_rand, + has_tracing_enabled, +) +from sentry_sdk.utils import is_valid_sample_rate, logger + +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from typing import Any, Optional, Sequence + from opentelemetry.context import Context + from opentelemetry.trace import Link, SpanKind + from opentelemetry.trace.span import SpanContext + from opentelemetry.util.types import Attributes + + +def get_parent_sampled( + parent_context: Optional[SpanContext], trace_id: int +) -> Optional[bool]: + if parent_context is None: + return None + + is_span_context_valid = parent_context is not None and parent_context.is_valid + + # Only inherit sample rate if `traceId` is the same + if is_span_context_valid and parent_context.trace_id == trace_id: + # this is getSamplingDecision in JS + # if there was no sampling flag, defer the decision + dsc_sampled = parent_context.trace_state.get(TRACESTATE_SAMPLED_KEY) + if dsc_sampled == "deferred": + return None + + if parent_context.trace_flags.sampled is not None: + return parent_context.trace_flags.sampled + + if dsc_sampled == "true": + return True + elif dsc_sampled == "false": + return False + + return None + + +def get_parent_sample_rate( + parent_context: Optional[SpanContext], trace_id: int +) -> Optional[float]: + if parent_context is None: + return None + + is_span_context_valid = parent_context is not None and parent_context.is_valid + + if is_span_context_valid and parent_context.trace_id == trace_id: + parent_sample_rate = parent_context.trace_state.get(TRACESTATE_SAMPLE_RATE_KEY) + if parent_sample_rate is None: + return None + + try: + return float(parent_sample_rate) + except Exception: + return None + + return None + + +def get_parent_sample_rand( + parent_context: Optional[SpanContext], trace_id: int +) -> Optional[Decimal]: + if parent_context is None: + return None + + is_span_context_valid = parent_context is not None and parent_context.is_valid + + if is_span_context_valid and parent_context.trace_id == trace_id: + parent_sample_rand = parent_context.trace_state.get(TRACESTATE_SAMPLE_RAND_KEY) + if parent_sample_rand is None: + return None + + return Decimal(parent_sample_rand) + + return None + + +def dropped_result( + span_context: SpanContext, + attributes: Attributes, + sample_rate: Optional[float] = None, + sample_rand: Optional[Decimal] = None, +) -> SamplingResult: + """ + React to a span getting unsampled and return a DROP SamplingResult. + + Update the trace_state with the effective sampled, sample_rate and sample_rand, + record that we dropped the event for client report purposes, and return + an OTel SamplingResult with Decision.DROP. 
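[Editor's aside, not part of the diff] The `trace_state` bookkeeping shared by `dropped_result` and `sampled_result` can be pictured with OTel's immutable `TraceState` API: a deferred decision is overwritten with the effective one, and the sample rate is propagated alongside it. The keys and values below are illustrative:

```python
from opentelemetry.trace.span import TraceState

ts = TraceState()
ts = ts.add("sentry-sampled", "deferred")    # parent deferred the decision
ts = ts.update("sentry-sampled", "false")    # resolve it, as _update_trace_state does
ts = ts.update("sentry-sample_rate", "0.25")
assert ts.get("sentry-sampled") == "false"
```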
+ + See for more info about OTel sampling: + https://opentelemetry-python.readthedocs.io/en/latest/sdk/trace.sampling.html + """ + trace_state = _update_trace_state( + span_context, sampled=False, sample_rate=sample_rate, sample_rand=sample_rand + ) + + is_root_span = not (span_context.is_valid and not span_context.is_remote) + if is_root_span: + # Tell Sentry why we dropped the transaction/root-span + client = sentry_sdk.get_client() + if client.monitor and client.monitor.downsample_factor > 0: + reason = "backpressure" + else: + reason = "sample_rate" + + if client.transport and has_tracing_enabled(client.options): + client.transport.record_lost_event(reason, data_category="transaction") + + # Only one span (the transaction itself) is discarded, since we did not record any spans here. + client.transport.record_lost_event(reason, data_category="span") + + return SamplingResult( + Decision.DROP, + attributes=attributes, + trace_state=trace_state, + ) + + +def sampled_result( + span_context: SpanContext, + attributes: Attributes, + sample_rate: Optional[float] = None, + sample_rand: Optional[Decimal] = None, +) -> SamplingResult: + """ + React to a span being sampled and return a sampled SamplingResult. + + Update the trace_state with the effective sampled, sample_rate and sample_rand, + and return an OTel SamplingResult with Decision.RECORD_AND_SAMPLE. + + See for more info about OTel sampling: + https://opentelemetry-python.readthedocs.io/en/latest/sdk/trace.sampling.html + """ + trace_state = _update_trace_state( + span_context, sampled=True, sample_rate=sample_rate, sample_rand=sample_rand + ) + + return SamplingResult( + Decision.RECORD_AND_SAMPLE, + attributes=attributes, + trace_state=trace_state, + ) + + +def _update_trace_state( + span_context: SpanContext, + sampled: bool, + sample_rate: Optional[float] = None, + sample_rand: Optional[Decimal] = None, +) -> TraceState: + trace_state = span_context.trace_state + + sampled = "true" if sampled else "false" + if TRACESTATE_SAMPLED_KEY not in trace_state: + trace_state = trace_state.add(TRACESTATE_SAMPLED_KEY, sampled) + elif trace_state.get(TRACESTATE_SAMPLED_KEY) == "deferred": + trace_state = trace_state.update(TRACESTATE_SAMPLED_KEY, sampled) + + if sample_rate is not None: + trace_state = trace_state.update(TRACESTATE_SAMPLE_RATE_KEY, str(sample_rate)) + + if sample_rand is not None: + trace_state = trace_state.update( + TRACESTATE_SAMPLE_RAND_KEY, f"{sample_rand:.6f}" # noqa: E231 + ) + + return trace_state + + +class SentrySampler(Sampler): + def should_sample( + self, + parent_context: Optional[Context], + trace_id: int, + name: str, + kind: Optional[SpanKind] = None, + attributes: Attributes = None, + links: Optional[Sequence[Link]] = None, + trace_state: Optional[TraceState] = None, + ) -> SamplingResult: + client = sentry_sdk.get_client() + + parent_span_context = trace.get_current_span(parent_context).get_span_context() + + attributes = attributes or {} + + # No tracing enabled, thus no sampling + if not has_tracing_enabled(client.options): + return dropped_result(parent_span_context, attributes) + + # parent_span_context.is_valid means this span has a parent, remote or local + is_root_span = not parent_span_context.is_valid or parent_span_context.is_remote + + sample_rate = None + + parent_sampled = get_parent_sampled(parent_span_context, trace_id) + parent_sample_rate = get_parent_sample_rate(parent_span_context, trace_id) + parent_sample_rand = get_parent_sample_rand(parent_span_context, trace_id) + + if 
parent_sample_rand is not None: + # We have a sample_rand on the incoming trace or we already backfilled + # it in PropagationContext + sample_rand = parent_sample_rand + else: + # We are the head SDK and we need to generate a new sample_rand + sample_rand = _generate_sample_rand(str(trace_id), (0, 1)) + + # Explicit sampled value provided at start_span + custom_sampled = attributes.get(SentrySpanAttribute.CUSTOM_SAMPLED) + + if custom_sampled is not None and isinstance(custom_sampled, bool): + if is_root_span: + sample_rate = float(custom_sampled) + if sample_rate > 0: + return sampled_result( + parent_span_context, + attributes, + sample_rate=sample_rate, + sample_rand=sample_rand, + ) + else: + return dropped_result( + parent_span_context, + attributes, + sample_rate=sample_rate, + sample_rand=sample_rand, + ) + else: + logger.debug( + f"[Tracing.Sampler] Ignoring sampled param for non-root span {name}" + ) + + # Check if there is a traces_sampler + # The traces_sampler is responsible for checking parent_sampled itself so that traces stay complete. + has_traces_sampler = callable(client.options.get("traces_sampler")) + + sample_rate_to_propagate = None + + if is_root_span and has_traces_sampler: + sampling_context = create_sampling_context( + name, attributes, parent_span_context, trace_id + ) + sample_rate = client.options["traces_sampler"](sampling_context) + sample_rate_to_propagate = sample_rate + else: + # Check if there is a parent with a sampling decision + if parent_sampled is not None: + sample_rate = bool(parent_sampled) + sample_rate_to_propagate = ( + parent_sample_rate if parent_sample_rate else sample_rate + ) + else: + # Check if there is a traces_sample_rate + sample_rate = client.options.get("traces_sample_rate") + sample_rate_to_propagate = sample_rate + + # If the sample rate is invalid, drop the span + sample_rate = is_valid_sample_rate(sample_rate, source=self.__class__.__name__) + if sample_rate is None: + logger.warning( + f"[Tracing.Sampler] Discarding {name} because of invalid sample rate."
+ ) + return dropped_result(parent_span_context, attributes) + + # Down-sample if the backpressure monitor says so + if is_root_span and client.monitor: + sample_rate /= 2**client.monitor.downsample_factor + if client.monitor.downsample_factor > 0: + sample_rate_to_propagate = sample_rate + + # Compare sample_rand to sample_rate to make the final sampling decision + sampled = sample_rand < Decimal.from_float(sample_rate) + + if sampled: + if is_root_span: + logger.debug( + f"[Tracing.Sampler] Sampled #{name} with sample_rate: {sample_rate} and sample_rand: {sample_rand}" + ) + + return sampled_result( + parent_span_context, + attributes, + sample_rate=sample_rate_to_propagate, + sample_rand=None if sample_rand == parent_sample_rand else sample_rand, + ) + else: + if is_root_span: + logger.debug( + f"[Tracing.Sampler] Dropped #{name} with sample_rate: {sample_rate} and sample_rand: {sample_rand}" + ) + + return dropped_result( + parent_span_context, + attributes, + sample_rate=sample_rate_to_propagate, + sample_rand=None if sample_rand == parent_sample_rand else sample_rand, + ) + + def get_description(self) -> str: + return self.__class__.__name__ + + +def create_sampling_context( + name: str, + attributes: Attributes, + parent_span_context: Optional[SpanContext], + trace_id: int, +) -> dict[str, Any]: + sampling_context: dict[str, Any] = { + "transaction_context": { + "name": name, + "op": attributes.get(SentrySpanAttribute.OP) if attributes else None, + "source": ( + attributes.get(SentrySpanAttribute.SOURCE) if attributes else None + ), + }, + "parent_sampled": get_parent_sampled(parent_span_context, trace_id), + } + + if attributes is not None: + sampling_context.update(attributes) + + return sampling_context diff --git a/sentry_sdk/opentelemetry/scope.py b/sentry_sdk/opentelemetry/scope.py new file mode 100644 index 0000000000..28714c672e --- /dev/null +++ b/sentry_sdk/opentelemetry/scope.py @@ -0,0 +1,229 @@ +from __future__ import annotations +from contextlib import contextmanager +import warnings + +from opentelemetry.context import ( + get_value, + set_value, + attach, + detach, + get_current, +) +from opentelemetry.trace import ( + SpanContext, + NonRecordingSpan, + TraceFlags, + TraceState, + use_span, + INVALID_SPAN, +) + +from sentry_sdk.opentelemetry.consts import ( + SENTRY_SCOPES_KEY, + SENTRY_FORK_ISOLATION_SCOPE_KEY, + SENTRY_USE_CURRENT_SCOPE_KEY, + SENTRY_USE_ISOLATION_SCOPE_KEY, + TRACESTATE_SAMPLED_KEY, +) +from sentry_sdk.opentelemetry.utils import trace_state_from_baggage +from sentry_sdk.scope import Scope, ScopeType +from sentry_sdk.tracing import Span +from sentry_sdk._types import TYPE_CHECKING + +if TYPE_CHECKING: + from typing import Tuple, Optional, Generator, Dict, Any + + +class PotelScope(Scope): + @classmethod + def _get_scopes(cls) -> Optional[Tuple[PotelScope, PotelScope]]: + """ + Returns the current scopes tuple on the otel context. Internal use only. + """ + return validate_scopes(get_value(SENTRY_SCOPES_KEY)) + + @classmethod + def get_current_scope(cls) -> PotelScope: + """ + Returns the current scope. + """ + return cls._get_current_scope() or _INITIAL_CURRENT_SCOPE + + @classmethod + def _get_current_scope(cls) -> Optional[PotelScope]: + """ + Returns the current scope without creating a new one. Internal use only. + """ + scopes = cls._get_scopes() + return scopes[0] if scopes else None + + @classmethod + def get_isolation_scope(cls) -> PotelScope: + """ + Returns the isolation scope.
+ """ + return cls._get_isolation_scope() or _INITIAL_ISOLATION_SCOPE + + @classmethod + def _get_isolation_scope(cls) -> Optional[PotelScope]: + """ + Returns the isolation scope without creating a new one. Internal use only. + """ + scopes = cls._get_scopes() + return scopes[1] if scopes else None + + @contextmanager + def continue_trace( + self, environ_or_headers: Dict[str, Any] + ) -> Generator[None, None, None]: + """ + Sets the propagation context from environment or headers to continue an incoming trace. + Any span started within this context manager will use the same trace_id, parent_span_id + and inherit the sampling decision from the incoming trace. + """ + self.generate_propagation_context(environ_or_headers) + + span_context = self._incoming_otel_span_context() + if span_context is None: + # force a new trace since no incoming stuff + with use_span(INVALID_SPAN): + yield + else: + with use_span(NonRecordingSpan(span_context)): + yield + + @contextmanager + def new_trace(self) -> Generator[None, None, None]: + """ + Force creation of a new trace. + """ + self.generate_propagation_context() + with use_span(INVALID_SPAN): + yield + + def _incoming_otel_span_context(self) -> Optional[SpanContext]: + if self._propagation_context is None: + return None + # If sentry-trace extraction didn't have a parent_span_id, we don't have an upstream header + if self._propagation_context.parent_span_id is None: + return None + + trace_flags = TraceFlags( + TraceFlags.SAMPLED + if self._propagation_context.parent_sampled + else TraceFlags.DEFAULT + ) + + if self._propagation_context.baggage: + trace_state = trace_state_from_baggage(self._propagation_context.baggage) + else: + trace_state = TraceState() + + # for twp to work, we also need to consider deferred sampling when the sampling + # flag is not present, so the above TraceFlags are not sufficient + if self._propagation_context.parent_sampled is None: + trace_state = trace_state.update(TRACESTATE_SAMPLED_KEY, "deferred") + + span_context = SpanContext( + trace_id=int(self._propagation_context.trace_id, 16), + span_id=int(self._propagation_context.parent_span_id, 16), + is_remote=True, + trace_flags=trace_flags, + trace_state=trace_state, + ) + + return span_context + + def start_transaction(self, **kwargs: Any) -> Span: + """ + .. deprecated:: 3.0.0 + This function is deprecated and will be removed in a future release. + Use :py:meth:`sentry_sdk.start_span` instead. 
+ """ + warnings.warn( + "The `start_transaction` method is deprecated, please use `sentry_sdk.start_span instead.`", + DeprecationWarning, + stacklevel=2, + ) + return self.start_span(**kwargs) + + def start_span(self, **kwargs: Any) -> Span: + return Span(**kwargs) + + +_INITIAL_CURRENT_SCOPE = PotelScope(ty=ScopeType.CURRENT) +_INITIAL_ISOLATION_SCOPE = PotelScope(ty=ScopeType.ISOLATION) + + +def setup_initial_scopes() -> None: + global _INITIAL_CURRENT_SCOPE, _INITIAL_ISOLATION_SCOPE + _INITIAL_CURRENT_SCOPE = PotelScope(ty=ScopeType.CURRENT) + _INITIAL_ISOLATION_SCOPE = PotelScope(ty=ScopeType.ISOLATION) + + scopes = (_INITIAL_CURRENT_SCOPE, _INITIAL_ISOLATION_SCOPE) + attach(set_value(SENTRY_SCOPES_KEY, scopes)) + + +def setup_scope_context_management() -> None: + import opentelemetry.context + from sentry_sdk.opentelemetry.contextvars_context import ( + SentryContextVarsRuntimeContext, + ) + + opentelemetry.context._RUNTIME_CONTEXT = SentryContextVarsRuntimeContext() + setup_initial_scopes() + + +@contextmanager +def isolation_scope() -> Generator[PotelScope, None, None]: + context = set_value(SENTRY_FORK_ISOLATION_SCOPE_KEY, True) + token = attach(context) + try: + yield PotelScope.get_isolation_scope() + finally: + detach(token) + + +@contextmanager +def new_scope() -> Generator[PotelScope, None, None]: + token = attach(get_current()) + try: + yield PotelScope.get_current_scope() + finally: + detach(token) + + +@contextmanager +def use_scope(scope: PotelScope) -> Generator[PotelScope, None, None]: + context = set_value(SENTRY_USE_CURRENT_SCOPE_KEY, scope) + token = attach(context) + + try: + yield scope + finally: + detach(token) + + +@contextmanager +def use_isolation_scope( + isolation_scope: PotelScope, +) -> Generator[PotelScope, None, None]: + context = set_value(SENTRY_USE_ISOLATION_SCOPE_KEY, isolation_scope) + token = attach(context) + + try: + yield isolation_scope + finally: + detach(token) + + +def validate_scopes(scopes: Any) -> Optional[Tuple[PotelScope, PotelScope]]: + if ( + isinstance(scopes, tuple) + and len(scopes) == 2 + and isinstance(scopes[0], PotelScope) + and isinstance(scopes[1], PotelScope) + ): + return scopes + else: + return None diff --git a/sentry_sdk/opentelemetry/span_processor.py b/sentry_sdk/opentelemetry/span_processor.py new file mode 100644 index 0000000000..a5dc2e23a7 --- /dev/null +++ b/sentry_sdk/opentelemetry/span_processor.py @@ -0,0 +1,320 @@ +from __future__ import annotations +from collections import deque, defaultdict + +from opentelemetry.trace import ( + format_trace_id, + format_span_id, + get_current_span, + INVALID_SPAN, + Span as AbstractSpan, +) +from opentelemetry.context import Context +from opentelemetry.sdk.trace import Span, ReadableSpan, SpanProcessor + +import sentry_sdk +from sentry_sdk.consts import SPANDATA, DEFAULT_SPAN_ORIGIN +from sentry_sdk.utils import get_current_thread_meta +from sentry_sdk.opentelemetry.consts import ( + OTEL_SENTRY_CONTEXT, + SentrySpanAttribute, +) +from sentry_sdk.opentelemetry.sampler import create_sampling_context +from sentry_sdk.opentelemetry.utils import ( + is_sentry_span, + convert_from_otel_timestamp, + extract_span_attributes, + extract_span_data, + extract_transaction_name_source, + get_trace_context, + get_profile_context, + get_sentry_meta, + set_sentry_meta, + delete_sentry_meta, +) +from sentry_sdk.profiler.continuous_profiler import ( + try_autostart_continuous_profiler, + get_profiler_id, + try_profile_lifecycle_trace_start, +) +from sentry_sdk.profiler.transaction_profiler 
import Profile +from sentry_sdk.utils import safe_str +from sentry_sdk._types import TYPE_CHECKING + +if TYPE_CHECKING: + from typing import Optional, List, Any, Deque, DefaultDict + from sentry_sdk._types import Event + + +DEFAULT_MAX_SPANS = 1000 + + +class SentrySpanProcessor(SpanProcessor): + """ + Converts OTel spans into Sentry spans so they can be sent to the Sentry backend. + """ + + def __new__(cls) -> SentrySpanProcessor: + if not hasattr(cls, "instance"): + cls.instance = super().__new__(cls) + + return cls.instance + + def __init__(self) -> None: + self._children_spans: DefaultDict[int, List[ReadableSpan]] = defaultdict(list) + self._dropped_spans: DefaultDict[int, int] = defaultdict(lambda: 0) + + def on_start(self, span: Span, parent_context: Optional[Context] = None) -> None: + if is_sentry_span(span): + return + + self._add_root_span(span, get_current_span(parent_context)) + self._start_profile(span) + + def on_end(self, span: ReadableSpan) -> None: + if is_sentry_span(span): + return + + is_root_span = not span.parent or span.parent.is_remote + if is_root_span: + # if have a root span ending, stop the profiler, build a transaction and send it + self._stop_profile(span) + self._flush_root_span(span) + else: + self._append_child_span(span) + + # TODO-neel-potel not sure we need a clear like JS + def shutdown(self) -> None: + pass + + # TODO-neel-potel change default? this is 30 sec + # TODO-neel-potel call this in client.flush + def force_flush(self, timeout_millis: int = 30000) -> bool: + return True + + def _add_root_span(self, span: Span, parent_span: AbstractSpan) -> None: + """ + This is required to make Span.root_span work + since we can't traverse back to the root purely with otel efficiently. + """ + if parent_span != INVALID_SPAN and not parent_span.get_span_context().is_remote: + # child span points to parent's root or parent + parent_root_span = get_sentry_meta(parent_span, "root_span") + set_sentry_meta(span, "root_span", parent_root_span or parent_span) + else: + # root span points to itself + set_sentry_meta(span, "root_span", span) + + def _start_profile(self, span: Span) -> None: + try_autostart_continuous_profiler() + + profiler_id = get_profiler_id() + thread_id, thread_name = get_current_thread_meta() + + if profiler_id: + span.set_attribute(SPANDATA.PROFILER_ID, profiler_id) + if thread_id: + span.set_attribute(SPANDATA.THREAD_ID, str(thread_id)) + if thread_name: + span.set_attribute(SPANDATA.THREAD_NAME, thread_name) + + is_root_span = not span.parent or span.parent.is_remote + sampled = span.context and span.context.trace_flags.sampled + + if is_root_span and sampled: + # profiler uses time.perf_counter_ns() so we cannot use the + # unix timestamp that is on span.start_time + # setting it to 0 means the profiler will internally measure time on start + profile = Profile(sampled, 0) + + sampling_context = create_sampling_context( + span.name, span.attributes, span.parent, span.context.trace_id + ) + profile._set_initial_sampling_decision(sampling_context) + profile.__enter__() + set_sentry_meta(span, "profile", profile) + + continuous_profile = try_profile_lifecycle_trace_start() + profiler_id = get_profiler_id() + if profiler_id: + span.set_attribute(SPANDATA.PROFILER_ID, profiler_id) + set_sentry_meta(span, "continuous_profile", continuous_profile) + + def _stop_profile(self, span: ReadableSpan) -> None: + continuous_profiler = get_sentry_meta(span, "continuous_profile") + if continuous_profiler: + continuous_profiler.stop() + + def 
_flush_root_span(self, span: ReadableSpan) -> None: + transaction_event = self._root_span_to_transaction_event(span) + if not transaction_event: + return + + collected_spans, dropped_spans = self._collect_children(span) + spans = [] + for child in collected_spans: + span_json = self._span_to_json(child) + if span_json: + spans.append(span_json) + + transaction_event["spans"] = spans + if dropped_spans > 0: + transaction_event["_dropped_spans"] = dropped_spans + + # TODO-neel-potel sort and cutoff max spans + + sentry_sdk.capture_event(transaction_event) + self._cleanup_references([span] + collected_spans) + + def _append_child_span(self, span: ReadableSpan) -> None: + if not span.parent: + return + + max_spans = ( + sentry_sdk.get_client().options["_experiments"].get("max_spans") + or DEFAULT_MAX_SPANS + ) + + children_spans = self._children_spans[span.parent.span_id] + if len(children_spans) < max_spans: + children_spans.append(span) + else: + self._dropped_spans[span.parent.span_id] += 1 + + def _collect_children(self, span: ReadableSpan) -> tuple[List[ReadableSpan], int]: + if not span.context: + return [], 0 + + children = [] + dropped_spans = 0 + bfs_queue: Deque[int] = deque() + bfs_queue.append(span.context.span_id) + + while bfs_queue: + parent_span_id = bfs_queue.popleft() + node_children = self._children_spans.pop(parent_span_id, []) + dropped_spans += self._dropped_spans.pop(parent_span_id, 0) + children.extend(node_children) + bfs_queue.extend( + [child.context.span_id for child in node_children if child.context] + ) + + return children, dropped_spans + + # we construct the event from scratch here + # and not use the current Transaction class for easier refactoring + def _root_span_to_transaction_event(self, span: ReadableSpan) -> Optional[Event]: + if not span.context: + return None + + event = self._common_span_transaction_attributes_as_json(span) + if event is None: + return None + + transaction_name, transaction_source = extract_transaction_name_source(span) + span_data = extract_span_data(span) + trace_context = get_trace_context(span, span_data=span_data) + contexts = {"trace": trace_context} + + profile_context = get_profile_context(span) + if profile_context: + contexts["profile"] = profile_context + + if span_data.http_status: + contexts["response"] = {"status_code": span_data.http_status} + + if span.resource.attributes: + contexts[OTEL_SENTRY_CONTEXT] = {"resource": dict(span.resource.attributes)} + + event.setdefault("tags", {}) + event.update( + { + "type": "transaction", + "transaction": transaction_name or span_data.description, + "transaction_info": {"source": transaction_source or "custom"}, + "contexts": contexts, + } + ) + + profile = get_sentry_meta(span, "profile") + if profile is not None and isinstance(profile, Profile): + profile.__exit__(None, None, None) + if profile.valid(): + event["profile"] = profile + + return event + + def _span_to_json(self, span: ReadableSpan) -> Optional[dict[str, Any]]: + if not span.context: + return None + + # need to ignore the type here due to TypedDict nonsense + span_json: Optional[dict[str, Any]] = self._common_span_transaction_attributes_as_json(span) # type: ignore + if span_json is None: + return None + + trace_id = format_trace_id(span.context.trace_id) + span_id = format_span_id(span.context.span_id) + parent_span_id = format_span_id(span.parent.span_id) if span.parent else None + + span_data = extract_span_data(span) + + span_json.update( + { + "trace_id": trace_id, + "span_id": span_id, + "description": 
span_data.description, + "origin": span_data.origin or DEFAULT_SPAN_ORIGIN, + } + ) + + if span_data.op: + span_json["op"] = span_data.op + if span_data.status: + span_json["status"] = span_data.status + if parent_span_id: + span_json["parent_span_id"] = parent_span_id + + attributes = getattr(span, "attributes", {}) or {} + if attributes: + span_json["data"] = {} + for key, value in attributes.items(): + if not key.startswith("_"): + span_json["data"][key] = value + + return span_json + + def _common_span_transaction_attributes_as_json( + self, span: ReadableSpan + ) -> Optional[Event]: + if not span.start_time or not span.end_time: + return None + + common_json: Event = { + "start_timestamp": convert_from_otel_timestamp(span.start_time), + "timestamp": convert_from_otel_timestamp(span.end_time), + } + + tags = extract_span_attributes(span, SentrySpanAttribute.TAG) + if tags: + common_json["tags"] = { + tag: safe_str(tag_value) for tag, tag_value in tags.items() + } + + return common_json + + def _cleanup_references(self, spans: List[ReadableSpan]) -> None: + for span in spans: + delete_sentry_meta(span) + + def _log_debug_info(self) -> None: + import pprint + + pprint.pprint( + { + format_span_id(span_id): [ + (format_span_id(child.context.span_id), child.name) + for child in children + ] + for span_id, children in self._children_spans.items() + } + ) diff --git a/sentry_sdk/opentelemetry/tracing.py b/sentry_sdk/opentelemetry/tracing.py new file mode 100644 index 0000000000..0038f40540 --- /dev/null +++ b/sentry_sdk/opentelemetry/tracing.py @@ -0,0 +1,79 @@ +from __future__ import annotations +from opentelemetry import trace +from opentelemetry.propagate import set_global_textmap +from opentelemetry.sdk.resources import Resource +from opentelemetry.sdk.trace import TracerProvider, Span, ReadableSpan + +from sentry_sdk.consts import VERSION +from sentry_sdk.opentelemetry import ( + SentryPropagator, + SentrySampler, + SentrySpanProcessor, +) +from sentry_sdk.opentelemetry.consts import ( + RESOURCE_SERVICE_NAME, + RESOURCE_SERVICE_NAMESPACE, + RESOURCE_SERVICE_VERSION, +) +from sentry_sdk.utils import logger + + +READABLE_SPAN_PATCHED = False + + +def patch_readable_span() -> None: + """ + We need to pass through sentry specific metadata/objects from Span to ReadableSpan + to work with them consistently in the SpanProcessor. + """ + global READABLE_SPAN_PATCHED + if not READABLE_SPAN_PATCHED: + old_readable_span = Span._readable_span + + def sentry_patched_readable_span(self: Span) -> ReadableSpan: + readable_span = old_readable_span(self) + readable_span._sentry_meta = getattr(self, "_sentry_meta", {}) # type: ignore[attr-defined] + return readable_span + + Span._readable_span = sentry_patched_readable_span # type: ignore[method-assign] + READABLE_SPAN_PATCHED = True + + +def setup_sentry_tracing() -> None: + # TracerProvider can only be set once. If we're the first ones setting it, + # there's no issue. If it already exists, we need to patch it. 
+ from opentelemetry.trace import _TRACER_PROVIDER + + if _TRACER_PROVIDER is not None: + logger.debug("[Tracing] Detected an existing TracerProvider, patching") + tracer_provider = _TRACER_PROVIDER + tracer_provider.sampler = SentrySampler() # type: ignore[attr-defined] + + else: + logger.debug("[Tracing] No TracerProvider set, creating a new one") + tracer_provider = TracerProvider( + sampler=SentrySampler(), + resource=Resource.create( + { + RESOURCE_SERVICE_NAME: "sentry-python", + RESOURCE_SERVICE_VERSION: VERSION, + RESOURCE_SERVICE_NAMESPACE: "sentry", + } + ), + ) + trace.set_tracer_provider(tracer_provider) + + try: + existing_span_processors = ( + tracer_provider._active_span_processor._span_processors # type: ignore[attr-defined] + ) + except Exception: + existing_span_processors = [] + + for span_processor in existing_span_processors: + if isinstance(span_processor, SentrySpanProcessor): + break + else: + tracer_provider.add_span_processor(SentrySpanProcessor()) # type: ignore[attr-defined] + + set_global_textmap(SentryPropagator()) diff --git a/sentry_sdk/opentelemetry/utils.py b/sentry_sdk/opentelemetry/utils.py new file mode 100644 index 0000000000..15bf9a6083 --- /dev/null +++ b/sentry_sdk/opentelemetry/utils.py @@ -0,0 +1,468 @@ +from __future__ import annotations +import re +from datetime import datetime, timezone +from dataclasses import dataclass + +from urllib3.util import parse_url as urlparse +from urllib.parse import quote, unquote +from opentelemetry.trace import ( + Span as AbstractSpan, + SpanKind, + StatusCode, + format_trace_id, + format_span_id, + TraceState, +) +from opentelemetry.semconv.trace import SpanAttributes +from opentelemetry.sdk.trace import ReadableSpan + +import sentry_sdk +from sentry_sdk.utils import Dsn +from sentry_sdk.consts import ( + SPANSTATUS, + OP, + SPANDATA, + DEFAULT_SPAN_ORIGIN, + LOW_QUALITY_TRANSACTION_SOURCES, +) +from sentry_sdk.opentelemetry.consts import SentrySpanAttribute +from sentry_sdk.tracing_utils import Baggage, get_span_status_from_http_code + +from sentry_sdk._types import TYPE_CHECKING + +if TYPE_CHECKING: + from typing import Any, Optional, Union, Type, TypeVar + from opentelemetry.util.types import Attributes + + T = TypeVar("T") + + +GRPC_ERROR_MAP = { + "1": SPANSTATUS.CANCELLED, + "2": SPANSTATUS.UNKNOWN_ERROR, + "3": SPANSTATUS.INVALID_ARGUMENT, + "4": SPANSTATUS.DEADLINE_EXCEEDED, + "5": SPANSTATUS.NOT_FOUND, + "6": SPANSTATUS.ALREADY_EXISTS, + "7": SPANSTATUS.PERMISSION_DENIED, + "8": SPANSTATUS.RESOURCE_EXHAUSTED, + "9": SPANSTATUS.FAILED_PRECONDITION, + "10": SPANSTATUS.ABORTED, + "11": SPANSTATUS.OUT_OF_RANGE, + "12": SPANSTATUS.UNIMPLEMENTED, + "13": SPANSTATUS.INTERNAL_ERROR, + "14": SPANSTATUS.UNAVAILABLE, + "15": SPANSTATUS.DATA_LOSS, + "16": SPANSTATUS.UNAUTHENTICATED, +} + + +def is_sentry_span(span: ReadableSpan) -> bool: + """ + Break an infinite loop: + HTTP requests to Sentry are caught by OTel and sent to Sentry again.
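To make the DSN check below concrete (hypothetical DSN and URLs; `Dsn.netloc` is the same helper the function uses):

from sentry_sdk.utils import Dsn

dsn = Dsn("https://abc123@o1.ingest.sentry.io/42")  # hypothetical DSN
# The ingest host appears in the envelope URL, so that span is dropped...
assert dsn.netloc in "https://o1.ingest.sentry.io/api/42/envelope/"
# ...while spans for ordinary outgoing requests are kept.
assert dsn.netloc not in "https://api.example.com/users"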
+ """ + from sentry_sdk import get_client + + if not span.attributes: + return False + + span_url = get_typed_attribute(span.attributes, SpanAttributes.HTTP_URL, str) + if span_url is None: + return False + + dsn_url = None + client = get_client() + + if client.dsn: + try: + dsn_url = Dsn(client.dsn).netloc + except Exception: + pass + + if not dsn_url: + return False + + if dsn_url in span_url: + return True + + return False + + +def convert_from_otel_timestamp(time: int) -> datetime: + """Convert an OTel nanosecond-level timestamp to a datetime.""" + return datetime.fromtimestamp(time / 1e9, timezone.utc) + + +def convert_to_otel_timestamp(time: Union[datetime, float]) -> int: + """Convert a datetime to an OTel timestamp (with nanosecond precision).""" + if isinstance(time, datetime): + return int(time.timestamp() * 1e9) + return int(time * 1e9) + + +def extract_transaction_name_source( + span: ReadableSpan, +) -> tuple[Optional[str], Optional[str]]: + if not span.attributes: + return (None, None) + return ( + get_typed_attribute(span.attributes, SentrySpanAttribute.NAME, str), + get_typed_attribute(span.attributes, SentrySpanAttribute.SOURCE, str), + ) + + +@dataclass +class ExtractedSpanData: + description: str + op: Optional[str] = None + status: Optional[str] = None + http_status: Optional[int] = None + origin: Optional[str] = None + + +def extract_span_data(span: ReadableSpan) -> ExtractedSpanData: + """ + Try to populate sane values for op, description and statuses based on what we have. + The op and description mapping is fundamentally janky because otel only has a single `name`. + + Priority is given first to attributes explicitly defined by us via the SDK. + Otherwise we try to infer sane values from other attributes. + """ + op = get_typed_attribute(span.attributes, SentrySpanAttribute.OP, str) or infer_op( + span + ) + + description = ( + get_typed_attribute(span.attributes, SentrySpanAttribute.DESCRIPTION, str) + or get_typed_attribute(span.attributes, SentrySpanAttribute.NAME, str) + or infer_description(span) + ) + + origin = get_typed_attribute(span.attributes, SentrySpanAttribute.ORIGIN, str) + + (status, http_status) = extract_span_status(span) + + return ExtractedSpanData( + description=description or span.name, + op=op, + status=status, + http_status=http_status, + origin=origin, + ) + + +def infer_op(span: ReadableSpan) -> Optional[str]: + """ + Try to infer op for the various types of instrumentation. 
+ """ + if span.attributes is None: + return None + + if SpanAttributes.HTTP_METHOD in span.attributes: + if span.kind == SpanKind.SERVER: + return OP.HTTP_SERVER + elif span.kind == SpanKind.CLIENT: + return OP.HTTP_CLIENT + else: + return OP.HTTP + elif SpanAttributes.DB_SYSTEM in span.attributes: + return OP.DB + elif SpanAttributes.RPC_SERVICE in span.attributes: + return OP.RPC + elif SpanAttributes.MESSAGING_SYSTEM in span.attributes: + return OP.MESSAGE + elif SpanAttributes.FAAS_TRIGGER in span.attributes: + return get_typed_attribute(span.attributes, SpanAttributes.FAAS_TRIGGER, str) + else: + return None + + +def infer_description(span: ReadableSpan) -> Optional[str]: + if span.attributes is None: + return None + + if SpanAttributes.HTTP_METHOD in span.attributes: + http_method = get_typed_attribute( + span.attributes, SpanAttributes.HTTP_METHOD, str + ) + route = get_typed_attribute(span.attributes, SpanAttributes.HTTP_ROUTE, str) + target = get_typed_attribute(span.attributes, SpanAttributes.HTTP_TARGET, str) + peer_name = get_typed_attribute( + span.attributes, SpanAttributes.NET_PEER_NAME, str + ) + url = get_typed_attribute(span.attributes, SpanAttributes.HTTP_URL, str) + + if route: + return f"{http_method} {route}" + elif target: + return f"{http_method} {target}" + elif peer_name: + return f"{http_method} {peer_name}" + elif url: + parsed_url = urlparse(url) + url = "{}://{}{}".format( + parsed_url.scheme, parsed_url.netloc, parsed_url.path + ) + return f"{http_method} {url}" + else: + return http_method + elif SpanAttributes.DB_SYSTEM in span.attributes: + return get_typed_attribute(span.attributes, SpanAttributes.DB_STATEMENT, str) + else: + return None + + +def extract_span_status(span: ReadableSpan) -> tuple[Optional[str], Optional[int]]: + """ + Extract a reasonable Sentry SPANSTATUS and a HTTP status code from the otel span. + OKs are simply OKs. + ERRORs first try to map to HTTP/GRPC statuses via attributes otherwise fallback + on the description if it is a valid status for Sentry. + In the final UNSET case, we try to infer HTTP/GRPC. 
+ """ + status = span.status + http_status = get_http_status_code(span.attributes) + final_status = None + + if status.status_code == StatusCode.OK: + final_status = SPANSTATUS.OK + elif status.status_code == StatusCode.ERROR: + inferred_status = infer_status_from_attributes(span.attributes, http_status) + + if inferred_status is not None: + final_status = inferred_status + elif ( + status.description is not None + and status.description in GRPC_ERROR_MAP.values() + ): + final_status = status.description + else: + final_status = SPANSTATUS.UNKNOWN_ERROR + else: + # UNSET case + final_status = infer_status_from_attributes(span.attributes, http_status) + + return (final_status, http_status) + + +def infer_status_from_attributes( + span_attributes: Attributes, http_status: Optional[int] +) -> Optional[str]: + if span_attributes is None: + return None + + if http_status: + return get_span_status_from_http_code(http_status) + + grpc_status = span_attributes.get(SpanAttributes.RPC_GRPC_STATUS_CODE) + if grpc_status: + return GRPC_ERROR_MAP.get(str(grpc_status), SPANSTATUS.UNKNOWN_ERROR) + + return None + + +def get_http_status_code(span_attributes: Attributes) -> Optional[int]: + try: + http_status = get_typed_attribute( + span_attributes, SpanAttributes.HTTP_RESPONSE_STATUS_CODE, int + ) + except AttributeError: + # HTTP_RESPONSE_STATUS_CODE was added in 1.21, so if we're on an older + # OTel version SpanAttributes.HTTP_RESPONSE_STATUS_CODE will throw an + # AttributeError + http_status = None + + if http_status is None: + # Fall back to the deprecated attribute + http_status = get_typed_attribute( + span_attributes, SpanAttributes.HTTP_STATUS_CODE, int + ) + + return http_status + + +def extract_span_attributes(span: ReadableSpan, namespace: str) -> dict[str, Any]: + """ + Extract Sentry-specific span attributes and make them look the way Sentry expects. 
+ """ + extracted_attrs: dict[str, Any] = {} + + for attr, value in (span.attributes or {}).items(): + if attr.startswith(namespace): + key = attr[len(namespace) + 1 :] + extracted_attrs[key] = value + + return extracted_attrs + + +def get_trace_context( + span: ReadableSpan, span_data: Optional[ExtractedSpanData] = None +) -> dict[str, Any]: + if not span.context: + return {} + + trace_id = format_trace_id(span.context.trace_id) + span_id = format_span_id(span.context.span_id) + parent_span_id = format_span_id(span.parent.span_id) if span.parent else None + + if span_data is None: + span_data = extract_span_data(span) + + trace_context: dict[str, Any] = { + "trace_id": trace_id, + "span_id": span_id, + "parent_span_id": parent_span_id, + "origin": span_data.origin or DEFAULT_SPAN_ORIGIN, + } + + if span_data.op: + trace_context["op"] = span_data.op + if span_data.status: + trace_context["status"] = span_data.status + if span.attributes: + trace_context["data"] = dict(span.attributes) + + trace_state = get_trace_state(span) + trace_context["dynamic_sampling_context"] = dsc_from_trace_state(trace_state) + + return trace_context + + +def trace_state_from_baggage(baggage: Baggage) -> TraceState: + items = [] + for k, v in baggage.sentry_items.items(): + key = Baggage.SENTRY_PREFIX + quote(k) + val = quote(str(v)) + items.append((key, val)) + return TraceState(items) + + +def baggage_from_trace_state(trace_state: TraceState) -> Baggage: + return Baggage(dsc_from_trace_state(trace_state)) + + +def serialize_trace_state(trace_state: TraceState) -> str: + sentry_items = [] + for k, v in trace_state.items(): + if Baggage.SENTRY_PREFIX_REGEX.match(k): + sentry_items.append((k, v)) + return ",".join(key + "=" + value for key, value in sentry_items) + + +def dsc_from_trace_state(trace_state: TraceState) -> dict[str, str]: + dsc = {} + for k, v in trace_state.items(): + if Baggage.SENTRY_PREFIX_REGEX.match(k): + key = re.sub(Baggage.SENTRY_PREFIX_REGEX, "", k) + dsc[unquote(key)] = unquote(v) + return dsc + + +def has_incoming_trace(trace_state: TraceState) -> bool: + """ + The existence of a sentry-trace_id in the baggage implies we continued an upstream trace. + """ + return (Baggage.SENTRY_PREFIX + "trace_id") in trace_state + + +def get_trace_state(span: Union[AbstractSpan, ReadableSpan]) -> TraceState: + """ + Get the existing trace_state with sentry items + or populate it if we are the head SDK. 
+ """ + span_context = span.get_span_context() + if not span_context: + return TraceState() + + trace_state = span_context.trace_state + + if has_incoming_trace(trace_state): + return trace_state + else: + client = sentry_sdk.get_client() + if not client.is_active(): + return trace_state + + options = client.options or {} + + trace_state = trace_state.update( + Baggage.SENTRY_PREFIX + "trace_id", + quote(format_trace_id(span_context.trace_id)), + ) + + if options.get("environment"): + trace_state = trace_state.update( + Baggage.SENTRY_PREFIX + "environment", quote(options["environment"]) + ) + + if options.get("release"): + trace_state = trace_state.update( + Baggage.SENTRY_PREFIX + "release", quote(options["release"]) + ) + + if options.get("dsn"): + trace_state = trace_state.update( + Baggage.SENTRY_PREFIX + "public_key", + quote(Dsn(options["dsn"]).public_key), + ) + + root_span = get_sentry_meta(span, "root_span") + if root_span and isinstance(root_span, ReadableSpan): + transaction_name, transaction_source = extract_transaction_name_source( + root_span + ) + + if ( + transaction_name + and transaction_source not in LOW_QUALITY_TRANSACTION_SOURCES + ): + trace_state = trace_state.update( + Baggage.SENTRY_PREFIX + "transaction", quote(transaction_name) + ) + + return trace_state + + +def get_sentry_meta(span: Union[AbstractSpan, ReadableSpan], key: str) -> Any: + sentry_meta = getattr(span, "_sentry_meta", None) + return sentry_meta.get(key) if sentry_meta else None + + +def set_sentry_meta( + span: Union[AbstractSpan, ReadableSpan], key: str, value: Any +) -> None: + sentry_meta = getattr(span, "_sentry_meta", {}) + sentry_meta[key] = value + span._sentry_meta = sentry_meta # type: ignore[union-attr] + + +def delete_sentry_meta(span: Union[AbstractSpan, ReadableSpan]) -> None: + try: + del span._sentry_meta # type: ignore[union-attr] + except AttributeError: + pass + + +def get_profile_context(span: ReadableSpan) -> Optional[dict[str, str]]: + if not span.attributes: + return None + + profiler_id = get_typed_attribute(span.attributes, SPANDATA.PROFILER_ID, str) + if profiler_id is None: + return None + + return {"profiler_id": profiler_id} + + +def get_typed_attribute(attributes: Attributes, key: str, type: Type[T]) -> Optional[T]: + """ + helper method to coerce types of attribute values + """ + if attributes is None: + return None + value = attributes.get(key) + if value is not None and isinstance(value, type): + return value + else: + return None diff --git a/sentry_sdk/profiler/__init__.py b/sentry_sdk/profiler/__init__.py index 0bc63e3a6d..762bd4d9cf 100644 --- a/sentry_sdk/profiler/__init__.py +++ b/sentry_sdk/profiler/__init__.py @@ -1,49 +1,9 @@ from sentry_sdk.profiler.continuous_profiler import ( - start_profile_session, start_profiler, - stop_profile_session, stop_profiler, ) -from sentry_sdk.profiler.transaction_profiler import ( - MAX_PROFILE_DURATION_NS, - PROFILE_MINIMUM_SAMPLES, - Profile, - Scheduler, - ThreadScheduler, - GeventScheduler, - has_profiling_enabled, - setup_profiler, - teardown_profiler, -) -from sentry_sdk.profiler.utils import ( - DEFAULT_SAMPLING_FREQUENCY, - MAX_STACK_DEPTH, - get_frame_name, - extract_frame, - extract_stack, - frame_id, -) __all__ = [ - "start_profile_session", # TODO: Deprecate this in favor of `start_profiler` "start_profiler", - "stop_profile_session", # TODO: Deprecate this in favor of `stop_profiler` "stop_profiler", - # DEPRECATED: The following was re-exported for backwards compatibility. 
It - # will be removed from sentry_sdk.profiler in a future release. - "MAX_PROFILE_DURATION_NS", - "PROFILE_MINIMUM_SAMPLES", - "Profile", - "Scheduler", - "ThreadScheduler", - "GeventScheduler", - "has_profiling_enabled", - "setup_profiler", - "teardown_profiler", - "DEFAULT_SAMPLING_FREQUENCY", - "MAX_STACK_DEPTH", - "get_frame_name", - "extract_frame", - "extract_stack", - "frame_id", ] diff --git a/sentry_sdk/profiler/continuous_profiler.py b/sentry_sdk/profiler/continuous_profiler.py index 00dd29e36c..27e4a42999 100644 --- a/sentry_sdk/profiler/continuous_profiler.py +++ b/sentry_sdk/profiler/continuous_profiler.py @@ -1,3 +1,4 @@ +from __future__ import annotations import atexit import os import random @@ -5,7 +6,6 @@ import threading import time import uuid -import warnings from collections import deque from datetime import datetime, timezone @@ -61,18 +61,19 @@ from gevent.monkey import get_original from gevent.threadpool import ThreadPool as _ThreadPool - ThreadPool = _ThreadPool # type: Optional[Type[_ThreadPool]] + ThreadPool: Optional[Type[_ThreadPool]] = _ThreadPool thread_sleep = get_original("time", "sleep") except ImportError: thread_sleep = time.sleep ThreadPool = None -_scheduler = None # type: Optional[ContinuousScheduler] +_scheduler: Optional[ContinuousScheduler] = None -def setup_continuous_profiler(options, sdk_info, capture_func): - # type: (Dict[str, Any], SDKInfo, Callable[[Envelope], None]) -> bool +def setup_continuous_profiler( + options: Dict[str, Any], sdk_info: SDKInfo, capture_func: Callable[[Envelope], None] +) -> bool: global _scheduler if _scheduler is not None: @@ -88,15 +89,9 @@ def setup_continuous_profiler(options, sdk_info, capture_func): else: default_profiler_mode = ThreadContinuousScheduler.mode + profiler_mode = default_profiler_mode if options.get("profiler_mode") is not None: profiler_mode = options["profiler_mode"] - else: - # TODO: deprecate this and just use the existing `profiler_mode` - experiments = options.get("_experiments", {}) - - profiler_mode = ( - experiments.get("continuous_profiling_mode") or default_profiler_mode - ) frequency = DEFAULT_SAMPLING_FREQUENCY @@ -122,8 +117,7 @@ def setup_continuous_profiler(options, sdk_info, capture_func): return True -def try_autostart_continuous_profiler(): - # type: () -> None +def try_autostart_continuous_profiler() -> None: # TODO: deprecate this as it'll be replaced by the auto lifecycle option @@ -136,69 +130,43 @@ def try_autostart_continuous_profiler(): _scheduler.manual_start() -def try_profile_lifecycle_trace_start(): - # type: () -> Union[ContinuousProfile, None] +def try_profile_lifecycle_trace_start() -> Union[ContinuousProfile, None]: if _scheduler is None: return None return _scheduler.auto_start() -def start_profiler(): - # type: () -> None +def start_profiler() -> None: if _scheduler is None: return _scheduler.manual_start() -def start_profile_session(): - # type: () -> None - - warnings.warn( - "The `start_profile_session` function is deprecated. Please use `start_profile` instead.", - DeprecationWarning, - stacklevel=2, - ) - start_profiler() - - -def stop_profiler(): - # type: () -> None +def stop_profiler() -> None: if _scheduler is None: return _scheduler.manual_stop() -def stop_profile_session(): - # type: () -> None - - warnings.warn( - "The `stop_profile_session` function is deprecated. 
Please use `stop_profile` instead.", - DeprecationWarning, - stacklevel=2, - ) - stop_profiler() - - -def teardown_continuous_profiler(): - # type: () -> None +def teardown_continuous_profiler() -> None: stop_profiler() global _scheduler _scheduler = None -def get_profiler_id(): - # type: () -> Union[str, None] +def get_profiler_id() -> Union[str, None]: if _scheduler is None: return None return _scheduler.profiler_id -def determine_profile_session_sampling_decision(sample_rate): - # type: (Union[float, None]) -> bool +def determine_profile_session_sampling_decision( + sample_rate: Union[float, None], +) -> bool: # `None` is treated as `0.0` if not sample_rate: @@ -210,16 +178,20 @@ def determine_profile_session_sampling_decision(sample_rate): class ContinuousProfile: active: bool = True - def stop(self): - # type: () -> None + def stop(self) -> None: self.active = False class ContinuousScheduler: - mode = "unknown" # type: ContinuousProfilerMode - - def __init__(self, frequency, options, sdk_info, capture_func): - # type: (int, Dict[str, Any], SDKInfo, Callable[[Envelope], None]) -> None + mode: ContinuousProfilerMode = "unknown" + + def __init__( + self, + frequency: int, + options: Dict[str, Any], + sdk_info: SDKInfo, + capture_func: Callable[[Envelope], None], + ) -> None: self.interval = 1.0 / frequency self.options = options self.sdk_info = sdk_info @@ -232,17 +204,16 @@ def __init__(self, frequency, options, sdk_info, capture_func): ) self.sampler = self.make_sampler() - self.buffer = None # type: Optional[ProfileBuffer] - self.pid = None # type: Optional[int] + self.buffer: Optional[ProfileBuffer] = None + self.pid: Optional[int] = None self.running = False self.soft_shutdown = False - self.new_profiles = deque(maxlen=128) # type: Deque[ContinuousProfile] - self.active_profiles = set() # type: Set[ContinuousProfile] + self.new_profiles: Deque[ContinuousProfile] = deque(maxlen=128) + self.active_profiles: Set[ContinuousProfile] = set() - def is_auto_start_enabled(self): - # type: () -> bool + def is_auto_start_enabled(self) -> bool: # Ensure that the scheduler only autostarts once per process. 
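The session sampling helper above boils down to one roll per process; a sketch of its behavior (None is treated as 0.0, as the comment notes):

import random

def session_sampled(sample_rate):
    # None and 0.0 both disable profile sessions entirely.
    if not sample_rate:
        return False
    return random.random() < sample_rate

assert session_sampled(None) is False
assert session_sampled(1.0) is True  # random.random() < 1.0 always holds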
# This is necessary because many web servers use forks to spawn @@ -258,8 +229,7 @@ def is_auto_start_enabled(self): return experiments.get("continuous_profiling_auto_start") - def auto_start(self): - # type: () -> Union[ContinuousProfile, None] + def auto_start(self) -> Union[ContinuousProfile, None]: if not self.sampled: return None @@ -275,8 +245,7 @@ def auto_start(self): return profile - def manual_start(self): - # type: () -> None + def manual_start(self) -> None: if not self.sampled: return @@ -285,48 +254,40 @@ def manual_start(self): self.ensure_running() - def manual_stop(self): - # type: () -> None + def manual_stop(self) -> None: if self.lifecycle != "manual": return self.teardown() - def ensure_running(self): - # type: () -> None + def ensure_running(self) -> None: raise NotImplementedError - def teardown(self): - # type: () -> None + def teardown(self) -> None: raise NotImplementedError - def pause(self): - # type: () -> None + def pause(self) -> None: raise NotImplementedError - def reset_buffer(self): - # type: () -> None + def reset_buffer(self) -> None: self.buffer = ProfileBuffer( self.options, self.sdk_info, PROFILE_BUFFER_SECONDS, self.capture_func ) @property - def profiler_id(self): - # type: () -> Union[str, None] + def profiler_id(self) -> Union[str, None]: if self.buffer is None: return None return self.buffer.profiler_id - def make_sampler(self): - # type: () -> Callable[..., bool] + def make_sampler(self) -> Callable[..., bool]: cwd = os.getcwd() cache = LRUCache(max_size=256) if self.lifecycle == "trace": - def _sample_stack(*args, **kwargs): - # type: (*Any, **Any) -> bool + def _sample_stack(*args: Any, **kwargs: Any) -> bool: """ Take a sample of the stack on all the threads in the process. This should be called at a regular interval to collect samples. @@ -391,8 +352,7 @@ def _sample_stack(*args, **kwargs): else: - def _sample_stack(*args, **kwargs): - # type: (*Any, **Any) -> bool + def _sample_stack(*args: Any, **kwargs: Any) -> bool: """ Take a sample of the stack on all the threads in the process. This should be called at a regular interval to collect samples. @@ -418,8 +378,7 @@ def _sample_stack(*args, **kwargs): return _sample_stack - def run(self): - # type: () -> None + def run(self) -> None: last = time.perf_counter() while self.running: @@ -456,18 +415,22 @@ class ThreadContinuousScheduler(ContinuousScheduler): the sampler at a regular interval. """ - mode = "thread" # type: ContinuousProfilerMode + mode: ContinuousProfilerMode = "thread" name = "sentry.profiler.ThreadContinuousScheduler" - def __init__(self, frequency, options, sdk_info, capture_func): - # type: (int, Dict[str, Any], SDKInfo, Callable[[Envelope], None]) -> None + def __init__( + self, + frequency: int, + options: Dict[str, Any], + sdk_info: SDKInfo, + capture_func: Callable[[Envelope], None], + ) -> None: super().__init__(frequency, options, sdk_info, capture_func) - self.thread = None # type: Optional[threading.Thread] + self.thread: Optional[threading.Thread] = None self.lock = threading.Lock() - def ensure_running(self): - # type: () -> None + def ensure_running(self) -> None: self.soft_shutdown = False @@ -504,8 +467,7 @@ def ensure_running(self): self.running = False self.thread = None - def teardown(self): - # type: () -> None + def teardown(self) -> None: if self.running: self.running = False @@ -530,21 +492,25 @@ class GeventContinuousScheduler(ContinuousScheduler): results in a sample containing only the sampler's code. 
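The `run` loops in these schedulers share a fixed-rate pattern; a minimal standalone version without the profiler bookkeeping:

import time

def run_at_interval(sample, interval, should_stop):
    last = time.perf_counter()
    while not should_stop():
        sample()
        # Sleep only for the remainder of the interval, so a slow sample
        # does not push every following sample further off schedule.
        elapsed = time.perf_counter() - last
        if elapsed < interval:
            time.sleep(interval - elapsed)
        last = time.perf_counter()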
""" - mode = "gevent" # type: ContinuousProfilerMode + mode: ContinuousProfilerMode = "gevent" - def __init__(self, frequency, options, sdk_info, capture_func): - # type: (int, Dict[str, Any], SDKInfo, Callable[[Envelope], None]) -> None + def __init__( + self, + frequency: int, + options: Dict[str, Any], + sdk_info: SDKInfo, + capture_func: Callable[[Envelope], None], + ) -> None: if ThreadPool is None: raise ValueError("Profiler mode: {} is not available".format(self.mode)) super().__init__(frequency, options, sdk_info, capture_func) - self.thread = None # type: Optional[_ThreadPool] + self.thread: Optional[_ThreadPool] = None self.lock = threading.Lock() - def ensure_running(self): - # type: () -> None + def ensure_running(self) -> None: self.soft_shutdown = False @@ -577,8 +543,7 @@ def ensure_running(self): self.running = False self.thread = None - def teardown(self): - # type: () -> None + def teardown(self) -> None: if self.running: self.running = False @@ -593,8 +558,13 @@ def teardown(self): class ProfileBuffer: - def __init__(self, options, sdk_info, buffer_size, capture_func): - # type: (Dict[str, Any], SDKInfo, int, Callable[[Envelope], None]) -> None + def __init__( + self, + options: Dict[str, Any], + sdk_info: SDKInfo, + buffer_size: int, + capture_func: Callable[[Envelope], None], + ) -> None: self.options = options self.sdk_info = sdk_info self.buffer_size = buffer_size @@ -616,8 +586,7 @@ def __init__(self, options, sdk_info, buffer_size, capture_func): datetime.now(timezone.utc).timestamp() - self.start_monotonic_time ) - def write(self, monotonic_time, sample): - # type: (float, ExtractedSample) -> None + def write(self, monotonic_time: float, sample: ExtractedSample) -> None: if self.should_flush(monotonic_time): self.flush() self.chunk = ProfileChunk() @@ -625,15 +594,13 @@ def write(self, monotonic_time, sample): self.chunk.write(self.start_timestamp + monotonic_time, sample) - def should_flush(self, monotonic_time): - # type: (float) -> bool + def should_flush(self, monotonic_time: float) -> bool: # If the delta between the new monotonic time and the start monotonic time # exceeds the buffer size, it means we should flush the chunk return monotonic_time - self.start_monotonic_time >= self.buffer_size - def flush(self): - # type: () -> None + def flush(self) -> None: chunk = self.chunk.to_json(self.profiler_id, self.options, self.sdk_info) envelope = Envelope() envelope.add_profile_chunk(chunk) @@ -641,18 +608,16 @@ def flush(self): class ProfileChunk: - def __init__(self): - # type: () -> None + def __init__(self) -> None: self.chunk_id = uuid.uuid4().hex - self.indexed_frames = {} # type: Dict[FrameId, int] - self.indexed_stacks = {} # type: Dict[StackId, int] - self.frames = [] # type: List[ProcessedFrame] - self.stacks = [] # type: List[ProcessedStack] - self.samples = [] # type: List[ProcessedSample] + self.indexed_frames: Dict[FrameId, int] = {} + self.indexed_stacks: Dict[StackId, int] = {} + self.frames: List[ProcessedFrame] = [] + self.stacks: List[ProcessedStack] = [] + self.samples: List[ProcessedSample] = [] - def write(self, ts, sample): - # type: (float, ExtractedSample) -> None + def write(self, ts: float, sample: ExtractedSample) -> None: for tid, (stack_id, frame_ids, frames) in sample: try: # Check if the stack is indexed first, this lets us skip @@ -680,8 +645,9 @@ def write(self, ts, sample): # When this happens, we abandon the current sample as it's bad. 
capture_internal_exception(sys.exc_info()) - def to_json(self, profiler_id, options, sdk_info): - # type: (str, Dict[str, Any], SDKInfo) -> Dict[str, Any] + def to_json( + self, profiler_id: str, options: Dict[str, Any], sdk_info: SDKInfo + ) -> Dict[str, Any]: profile = { "frames": self.frames, "stacks": self.stacks, diff --git a/sentry_sdk/profiler/transaction_profiler.py b/sentry_sdk/profiler/transaction_profiler.py index 3743b7c905..f60dd95a87 100644 --- a/sentry_sdk/profiler/transaction_profiler.py +++ b/sentry_sdk/profiler/transaction_profiler.py @@ -25,6 +25,7 @@ SOFTWARE. """ +from __future__ import annotations import atexit import os import platform @@ -33,7 +34,6 @@ import threading import time import uuid -import warnings from abc import ABC, abstractmethod from collections import deque @@ -49,7 +49,6 @@ is_gevent, is_valid_sample_rate, logger, - nanosecond_time, set_in_app_in_frames, ) @@ -101,7 +100,7 @@ from gevent.monkey import get_original from gevent.threadpool import ThreadPool as _ThreadPool - ThreadPool = _ThreadPool # type: Optional[Type[_ThreadPool]] + ThreadPool: Optional[Type[_ThreadPool]] = _ThreadPool thread_sleep = get_original("time", "sleep") except ImportError: thread_sleep = time.sleep @@ -109,7 +108,7 @@ ThreadPool = None -_scheduler = None # type: Optional[Scheduler] +_scheduler: Optional[Scheduler] = None # The minimum number of unique samples that must exist in a profile to be @@ -117,8 +116,7 @@ PROFILE_MINIMUM_SAMPLES = 2 -def has_profiling_enabled(options): - # type: (Dict[str, Any]) -> bool +def has_profiling_enabled(options: Dict[str, Any]) -> bool: profiles_sampler = options["profiles_sampler"] if profiles_sampler is not None: return True @@ -127,21 +125,10 @@ def has_profiling_enabled(options): if profiles_sample_rate is not None and profiles_sample_rate > 0: return True - profiles_sample_rate = options["_experiments"].get("profiles_sample_rate") - if profiles_sample_rate is not None: - logger.warning( - "_experiments['profiles_sample_rate'] is deprecated. " - "Please use the non-experimental profiles_sample_rate option " - "directly." - ) - if profiles_sample_rate > 0: - return True - return False -def setup_profiler(options): - # type: (Dict[str, Any]) -> bool +def setup_profiler(options: Dict[str, Any]) -> bool: global _scheduler if _scheduler is not None: @@ -159,16 +146,9 @@ def setup_profiler(options): else: default_profiler_mode = ThreadScheduler.mode + profiler_mode = default_profiler_mode if options.get("profiler_mode") is not None: profiler_mode = options["profiler_mode"] - else: - profiler_mode = options.get("_experiments", {}).get("profiler_mode") - if profiler_mode is not None: - logger.warning( - "_experiments['profiler_mode'] is deprecated. Please use the " - "non-experimental profiler_mode option directly." - ) - profiler_mode = profiler_mode or default_profiler_mode if ( profiler_mode == ThreadScheduler.mode @@ -191,8 +171,7 @@ def setup_profiler(options): return True -def teardown_profiler(): - # type: () -> None +def teardown_profiler() -> None: global _scheduler @@ -208,51 +187,38 @@ def teardown_profiler(): class Profile: def __init__( self, - sampled, # type: Optional[bool] - start_ns, # type: int - hub=None, # type: Optional[sentry_sdk.Hub] - scheduler=None, # type: Optional[Scheduler] - ): - # type: (...) 
-> None + sampled: Optional[bool], + start_ns: int, + scheduler: Optional[Scheduler] = None, + ) -> None: self.scheduler = _scheduler if scheduler is None else scheduler - self.event_id = uuid.uuid4().hex # type: str + self.event_id: str = uuid.uuid4().hex - self.sampled = sampled # type: Optional[bool] + self.sampled: Optional[bool] = sampled # Various framework integrations are capable of overwriting the active thread id. # If it is set to `None` at the end of the profile, we fall back to the default. - self._default_active_thread_id = get_current_thread_meta()[0] or 0 # type: int - self.active_thread_id = None # type: Optional[int] + self._default_active_thread_id: int = get_current_thread_meta()[0] or 0 + self.active_thread_id: Optional[int] = None try: - self.start_ns = start_ns # type: int + self.start_ns: int = start_ns except AttributeError: self.start_ns = 0 - self.stop_ns = 0 # type: int - self.active = False # type: bool + self.stop_ns: int = 0 + self.active: bool = False - self.indexed_frames = {} # type: Dict[FrameId, int] - self.indexed_stacks = {} # type: Dict[StackId, int] - self.frames = [] # type: List[ProcessedFrame] - self.stacks = [] # type: List[ProcessedStack] - self.samples = [] # type: List[ProcessedSample] + self.indexed_frames: Dict[FrameId, int] = {} + self.indexed_stacks: Dict[StackId, int] = {} + self.frames: List[ProcessedFrame] = [] + self.stacks: List[ProcessedStack] = [] + self.samples: List[ProcessedSample] = [] self.unique_samples = 0 - # Backwards compatibility with the old hub property - self._hub = None # type: Optional[sentry_sdk.Hub] - if hub is not None: - self._hub = hub - warnings.warn( - "The `hub` parameter is deprecated. Please do not use it.", - DeprecationWarning, - stacklevel=2, - ) - - def update_active_thread_id(self): - # type: () -> None + def update_active_thread_id(self) -> None: self.active_thread_id = get_current_thread_meta()[0] logger.debug( "[Profiling] updating active thread id to {tid}".format( @@ -260,8 +226,7 @@ def update_active_thread_id(self): ) ) - def _set_initial_sampling_decision(self, sampling_context): - # type: (SamplingContext) -> None + def _set_initial_sampling_decision(self, sampling_context: SamplingContext) -> None: """ Sets the profile's sampling decision according to the following precedence rules: @@ -296,12 +261,11 @@ def _set_initial_sampling_decision(self, sampling_context): options = client.options + sample_rate = None if callable(options.get("profiles_sampler")): sample_rate = options["profiles_sampler"](sampling_context) elif options["profiles_sample_rate"] is not None: sample_rate = options["profiles_sample_rate"] - else: - sample_rate = options["_experiments"].get("profiles_sample_rate") # The profiles_sample_rate option was not set, so profiling # was never enabled. @@ -312,7 +276,8 @@ def _set_initial_sampling_decision(self, sampling_context): self.sampled = False return - if not is_valid_sample_rate(sample_rate, source="Profiling"): + sample_rate = is_valid_sample_rate(sample_rate, source="Profiling") + if sample_rate is None: logger.warning( "[Profiling] Discarding profile because of invalid sample rate." ) @@ -322,19 +287,18 @@ def _set_initial_sampling_decision(self, sampling_context): # Now we roll the dice. random.random is inclusive of 0, but not of 1, # so strict < is safe here. 
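The precedence implemented in `_set_initial_sampling_decision` above, reduced to a sketch (rate validation and the tracing pre-checks are handled elsewhere in that method):

def resolve_profile_sample_rate(options, sampling_context):
    # A callable profiles_sampler wins over the static profiles_sample_rate.
    if callable(options.get("profiles_sampler")):
        return options["profiles_sampler"](sampling_context)
    return options.get("profiles_sample_rate")

opts = {"profiles_sample_rate": 0.1, "profiles_sampler": lambda ctx: 1.0}
assert resolve_profile_sample_rate(opts, {}) == 1.0  # sampler wins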
In case sample_rate is a boolean, cast it # to a float (True becomes 1.0 and False becomes 0.0) - self.sampled = random.random() < float(sample_rate) + self.sampled = random.random() < sample_rate if self.sampled: logger.debug("[Profiling] Initializing profile") else: logger.debug( "[Profiling] Discarding profile because it's not included in the random sample (sample rate = {sample_rate})".format( - sample_rate=float(sample_rate) + sample_rate=sample_rate ) ) - def start(self): - # type: () -> None + def start(self) -> None: if not self.sampled or self.active: return @@ -342,21 +306,19 @@ def start(self): logger.debug("[Profiling] Starting profile") self.active = True if not self.start_ns: - self.start_ns = nanosecond_time() + self.start_ns = time.perf_counter_ns() self.scheduler.start_profiling(self) - def stop(self): - # type: () -> None + def stop(self) -> None: if not self.sampled or not self.active: return assert self.scheduler, "No scheduler specified" logger.debug("[Profiling] Stopping profile") self.active = False - self.stop_ns = nanosecond_time() + self.stop_ns = time.perf_counter_ns() - def __enter__(self): - # type: () -> Profile + def __enter__(self) -> Profile: scope = sentry_sdk.get_isolation_scope() old_profile = scope.profile scope.profile = self @@ -367,8 +329,9 @@ def __enter__(self): return self - def __exit__(self, ty, value, tb): - # type: (Optional[Any], Optional[Any], Optional[Any]) -> None + def __exit__( + self, ty: Optional[Any], value: Optional[Any], tb: Optional[Any] + ) -> None: self.stop() scope, old_profile = self._context_manager_state @@ -376,8 +339,7 @@ def __exit__(self, ty, value, tb): scope.profile = old_profile - def write(self, ts, sample): - # type: (int, ExtractedSample) -> None + def write(self, ts: int, sample: ExtractedSample) -> None: if not self.active: return @@ -420,18 +382,17 @@ def write(self, ts, sample): # When this happens, we abandon the current sample as it's bad. capture_internal_exception(sys.exc_info()) - def process(self): - # type: () -> ProcessedProfile + def process(self) -> ProcessedProfile: # This collects the thread metadata at the end of a profile. Doing it # this way means that any threads that terminate before the profile ends # will not have any metadata associated with it. - thread_metadata = { + thread_metadata: Dict[str, ProcessedThreadMetadata] = { str(thread.ident): { "name": str(thread.name), } for thread in threading.enumerate() - } # type: Dict[str, ProcessedThreadMetadata] + } return { "frames": self.frames, @@ -440,8 +401,7 @@ def process(self): "thread_metadata": thread_metadata, } - def to_json(self, event_opt, options): - # type: (Event, Dict[str, Any]) -> Dict[str, Any] + def to_json(self, event_opt: Event, options: Dict[str, Any]) -> Dict[str, Any]: profile = self.process() set_in_app_in_frames( @@ -491,8 +451,7 @@ def to_json(self, event_opt, options): ], } - def valid(self): - # type: () -> bool + def valid(self) -> bool: client = sentry_sdk.get_client() if not client.is_active(): return False @@ -517,61 +476,37 @@ def valid(self): return True - @property - def hub(self): - # type: () -> Optional[sentry_sdk.Hub] - warnings.warn( - "The `hub` attribute is deprecated. Please do not access it.", - DeprecationWarning, - stacklevel=2, - ) - return self._hub - - @hub.setter - def hub(self, value): - # type: (Optional[sentry_sdk.Hub]) -> None - warnings.warn( - "The `hub` attribute is deprecated. 
Please do not set it.", - DeprecationWarning, - stacklevel=2, - ) - self._hub = value - class Scheduler(ABC): - mode = "unknown" # type: ProfilerMode + mode: ProfilerMode = "unknown" - def __init__(self, frequency): - # type: (int) -> None + def __init__(self, frequency: int) -> None: self.interval = 1.0 / frequency self.sampler = self.make_sampler() # cap the number of new profiles at any time so it does not grow infinitely - self.new_profiles = deque(maxlen=128) # type: Deque[Profile] - self.active_profiles = set() # type: Set[Profile] + self.new_profiles: Deque[Profile] = deque(maxlen=128) + self.active_profiles: Set[Profile] = set() - def __enter__(self): - # type: () -> Scheduler + def __enter__(self) -> Scheduler: self.setup() return self - def __exit__(self, ty, value, tb): - # type: (Optional[Any], Optional[Any], Optional[Any]) -> None + def __exit__( + self, ty: Optional[Any], value: Optional[Any], tb: Optional[Any] + ) -> None: self.teardown() @abstractmethod - def setup(self): - # type: () -> None + def setup(self) -> None: pass @abstractmethod - def teardown(self): - # type: () -> None + def teardown(self) -> None: pass - def ensure_running(self): - # type: () -> None + def ensure_running(self) -> None: """ Ensure the scheduler is running. By default, this method is a no-op. The method should be overridden by any implementation for which it is @@ -579,19 +514,16 @@ def ensure_running(self): """ return None - def start_profiling(self, profile): - # type: (Profile) -> None + def start_profiling(self, profile: Profile) -> None: self.ensure_running() self.new_profiles.append(profile) - def make_sampler(self): - # type: () -> Callable[..., None] + def make_sampler(self) -> Callable[..., None]: cwd = os.getcwd() cache = LRUCache(max_size=256) - def _sample_stack(*args, **kwargs): - # type: (*Any, **Any) -> None + def _sample_stack(*args: Any, **kwargs: Any) -> None: """ Take a sample of the stack on all the threads in the process. This should be called at a regular interval to collect samples. @@ -612,7 +544,7 @@ def _sample_stack(*args, **kwargs): # were started after this point. new_profiles = len(self.new_profiles) - now = nanosecond_time() + now = time.perf_counter_ns() try: sample = [ @@ -662,32 +594,28 @@ class ThreadScheduler(Scheduler): the sampler at a regular interval. """ - mode = "thread" # type: ProfilerMode + mode: ProfilerMode = "thread" name = "sentry.profiler.ThreadScheduler" - def __init__(self, frequency): - # type: (int) -> None + def __init__(self, frequency: int) -> None: super().__init__(frequency=frequency) # used to signal to the thread that it should stop self.running = False - self.thread = None # type: Optional[threading.Thread] - self.pid = None # type: Optional[int] + self.thread: Optional[threading.Thread] = None + self.pid: Optional[int] = None self.lock = threading.Lock() - def setup(self): - # type: () -> None + def setup(self) -> None: pass - def teardown(self): - # type: () -> None + def teardown(self) -> None: if self.running: self.running = False if self.thread is not None: self.thread.join() - def ensure_running(self): - # type: () -> None + def ensure_running(self) -> None: """ Check that the profiler has an active thread to run in, and start one if that's not the case. 
@@ -725,8 +653,7 @@ def ensure_running(self): self.thread = None return - def run(self): - # type: () -> None + def run(self) -> None: last = time.perf_counter() while self.running: @@ -758,11 +685,10 @@ class GeventScheduler(Scheduler): results in a sample containing only the sampler's code. """ - mode = "gevent" # type: ProfilerMode + mode: ProfilerMode = "gevent" name = "sentry.profiler.GeventScheduler" - def __init__(self, frequency): - # type: (int) -> None + def __init__(self, frequency: int) -> None: if ThreadPool is None: raise ValueError("Profiler mode: {} is not available".format(self.mode)) @@ -771,27 +697,24 @@ def __init__(self, frequency): # used to signal to the thread that it should stop self.running = False - self.thread = None # type: Optional[_ThreadPool] - self.pid = None # type: Optional[int] + self.thread: Optional[_ThreadPool] = None + self.pid: Optional[int] = None # This intentionally uses the gevent patched threading.Lock. # The lock will be required when first trying to start profiles # as we need to spawn the profiler thread from the greenlets. self.lock = threading.Lock() - def setup(self): - # type: () -> None + def setup(self) -> None: pass - def teardown(self): - # type: () -> None + def teardown(self) -> None: if self.running: self.running = False if self.thread is not None: self.thread.join() - def ensure_running(self): - # type: () -> None + def ensure_running(self) -> None: pid = os.getpid() # is running on the right process @@ -818,8 +741,7 @@ def ensure_running(self): self.thread = None return - def run(self): - # type: () -> None + def run(self) -> None: last = time.perf_counter() while self.running: diff --git a/sentry_sdk/profiler/utils.py b/sentry_sdk/profiler/utils.py index 3554cddb5d..40d667dce2 100644 --- a/sentry_sdk/profiler/utils.py +++ b/sentry_sdk/profiler/utils.py @@ -1,3 +1,4 @@ +from __future__ import annotations import os from collections import deque @@ -63,14 +64,12 @@ if PY311: - def get_frame_name(frame): - # type: (FrameType) -> str + def get_frame_name(frame: FrameType) -> str: return frame.f_code.co_qualname else: - def get_frame_name(frame): - # type: (FrameType) -> str + def get_frame_name(frame: FrameType) -> str: f_code = frame.f_code co_varnames = f_code.co_varnames @@ -117,13 +116,11 @@ def get_frame_name(frame): return name -def frame_id(raw_frame): - # type: (FrameType) -> FrameId +def frame_id(raw_frame: FrameType) -> FrameId: return (raw_frame.f_code.co_filename, raw_frame.f_lineno, get_frame_name(raw_frame)) -def extract_frame(fid, raw_frame, cwd): - # type: (FrameId, FrameType, str) -> ProcessedFrame +def extract_frame(fid: FrameId, raw_frame: FrameType, cwd: str) -> ProcessedFrame: abs_path = raw_frame.f_code.co_filename try: @@ -152,12 +149,11 @@ def extract_frame(fid, raw_frame, cwd): def extract_stack( - raw_frame, # type: Optional[FrameType] - cache, # type: LRUCache - cwd, # type: str - max_stack_depth=MAX_STACK_DEPTH, # type: int -): - # type: (...) -> ExtractedStack + raw_frame: Optional[FrameType], + cache: LRUCache, + cwd: str, + max_stack_depth: int = MAX_STACK_DEPTH, +) -> ExtractedStack: """ Extracts the stack starting the specified frame. The extracted stack assumes the specified frame is the top of the stack, and works back @@ -167,7 +163,7 @@ def extract_stack( only the first `MAX_STACK_DEPTH` frames will be returned. 
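The frame walk that `extract_stack` performs can be tried directly; a sketch of the same idea without the LRU caching layer:

import sys
from collections import deque

def stack_function_names(max_depth=128):
    # Walk from the caller's frame toward the root via f_back; the
    # bounded deque caps how many frames are retained.
    frames = deque(maxlen=max_depth)
    frame = sys._getframe(1)
    while frame is not None:
        frames.append(frame)
        frame = frame.f_back
    # Reverse so the result reads root -> leaf, like a traceback.
    return [f.f_code.co_name for f in reversed(frames)]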
""" - raw_frames = deque(maxlen=max_stack_depth) # type: Deque[FrameType] + raw_frames: Deque[FrameType] = deque(maxlen=max_stack_depth) while raw_frame is not None: f_back = raw_frame.f_back diff --git a/sentry_sdk/scope.py b/sentry_sdk/scope.py index 73bf43573e..2f99fe4df3 100644 --- a/sentry_sdk/scope.py +++ b/sentry_sdk/scope.py @@ -1,3 +1,4 @@ +from __future__ import annotations import os import sys import warnings @@ -11,27 +12,23 @@ from sentry_sdk._types import AnnotatedValue from sentry_sdk.attachments import Attachment -from sentry_sdk.consts import DEFAULT_MAX_BREADCRUMBS, FALSE_VALUES, INSTRUMENTER -from sentry_sdk.feature_flags import FlagBuffer, DEFAULT_FLAG_CAPACITY -from sentry_sdk.profiler.continuous_profiler import ( - get_profiler_id, - try_autostart_continuous_profiler, - try_profile_lifecycle_trace_start, +from sentry_sdk.consts import ( + DEFAULT_MAX_BREADCRUMBS, + FALSE_VALUES, + BAGGAGE_HEADER_NAME, + SENTRY_TRACE_HEADER_NAME, ) +from sentry_sdk.feature_flags import FlagBuffer, DEFAULT_FLAG_CAPACITY from sentry_sdk.profiler.transaction_profiler import Profile from sentry_sdk.session import Session from sentry_sdk.tracing_utils import ( Baggage, has_tracing_enabled, - normalize_incoming_data, PropagationContext, ) from sentry_sdk.tracing import ( - BAGGAGE_HEADER_NAME, - SENTRY_TRACE_HEADER_NAME, NoOpSpan, Span, - Transaction, ) from sentry_sdk.utils import ( capture_internal_exception, @@ -42,29 +39,31 @@ event_from_exception, exc_info_from_error, logger, + safe_str, ) -import typing from typing import TYPE_CHECKING if TYPE_CHECKING: - from collections.abc import Mapping, MutableMapping + from typing import ( + Any, + Callable, + Deque, + Dict, + Generator, + Iterator, + List, + Optional, + ParamSpec, + Tuple, + TypeVar, + Union, + Self, + ) - from typing import Any - from typing import Callable - from typing import Deque - from typing import Dict - from typing import Generator - from typing import Iterator - from typing import List - from typing import Optional - from typing import ParamSpec - from typing import Tuple - from typing import TypeVar - from typing import Union - - from typing_extensions import Unpack + from collections.abc import Mapping, MutableMapping + import sentry_sdk from sentry_sdk._types import ( Breadcrumb, BreadcrumbHint, @@ -74,14 +73,9 @@ ExcInfo, Hint, LogLevelStr, - SamplingContext, Type, ) - from sentry_sdk.tracing import TransactionKwargs - - import sentry_sdk - P = ParamSpec("P") R = TypeVar("R") @@ -93,7 +87,7 @@ # In case this is a http server (think web framework) with multiple users # the data will be added to events of all users. # Typically this is used for process wide data such as the release. -_global_scope = None # type: Optional[Scope] +_global_scope: Optional[Scope] = None # Holds data for the active request. # This is used to isolate data for different requests or users. @@ -105,7 +99,7 @@ # This can be used to manually add additional data to a span. 
_current_scope = ContextVar("current_scope", default=None) -global_event_processors = [] # type: List[EventProcessor] +global_event_processors: List[EventProcessor] = [] class ScopeType(Enum): @@ -115,43 +109,17 @@ class ScopeType(Enum): MERGED = "merged" -class _ScopeManager: - def __init__(self, hub=None): - # type: (Optional[Any]) -> None - self._old_scopes = [] # type: List[Scope] - - def __enter__(self): - # type: () -> Scope - isolation_scope = Scope.get_isolation_scope() - - self._old_scopes.append(isolation_scope) - - forked_scope = isolation_scope.fork() - _isolation_scope.set(forked_scope) - - return forked_scope - - def __exit__(self, exc_type, exc_value, tb): - # type: (Any, Any, Any) -> None - old_scope = self._old_scopes.pop() - _isolation_scope.set(old_scope) - - -def add_global_event_processor(processor): - # type: (EventProcessor) -> None +def add_global_event_processor(processor: EventProcessor) -> None: global_event_processors.append(processor) -def _attr_setter(fn): - # type: (Any) -> Any +def _attr_setter(fn: Any) -> Any: return property(fset=fn, doc=fn.__doc__) -def _disable_capture(fn): - # type: (F) -> F +def _disable_capture(fn: F) -> F: @wraps(fn) - def wrapper(self, *args, **kwargs): - # type: (Any, *Dict[str, Any], **Any) -> Any + def wrapper(self: Any, *args: Any, **kwargs: Any) -> Any: if not self._should_capture: return try: @@ -203,34 +171,29 @@ class Scope: "_flags", ) - def __init__(self, ty=None, client=None): - # type: (Optional[ScopeType], Optional[sentry_sdk.Client]) -> None + def __init__(self, ty: Optional[ScopeType] = None) -> None: self._type = ty - self._event_processors = [] # type: List[EventProcessor] - self._error_processors = [] # type: List[ErrorProcessor] + self._event_processors: List[EventProcessor] = [] + self._error_processors: List[ErrorProcessor] = [] - self._name = None # type: Optional[str] - self._propagation_context = None # type: Optional[PropagationContext] - self._n_breadcrumbs_truncated = 0 # type: int + self._name: Optional[str] = None + self._propagation_context: Optional[PropagationContext] = None + self._n_breadcrumbs_truncated: int = 0 - self.client = NonRecordingClient() # type: sentry_sdk.client.BaseClient - - if client is not None: - self.set_client(client) + self.client: sentry_sdk.client.BaseClient = NonRecordingClient() self.clear() incoming_trace_information = self._load_trace_data_from_env() self.generate_propagation_context(incoming_data=incoming_trace_information) - def __copy__(self): - # type: () -> Scope + def __copy__(self) -> Self: """ Returns a copy of this scope. This also creates a copy of all referenced data structures. """ - rv = object.__new__(self.__class__) # type: Scope + rv: Self = object.__new__(self.__class__) rv._type = self._type rv.client = self.client @@ -266,14 +229,13 @@ def __copy__(self): return rv @classmethod - def get_current_scope(cls): - # type: () -> Scope + def get_current_scope(cls) -> Scope: """ .. versionadded:: 2.0.0 Returns the current scope. """ - current_scope = _current_scope.get() + current_scope = cls._get_current_scope() if current_scope is None: current_scope = Scope(ty=ScopeType.CURRENT) _current_scope.set(current_scope) @@ -281,8 +243,14 @@ def get_current_scope(cls): return current_scope @classmethod - def set_current_scope(cls, new_current_scope): - # type: (Scope) -> None + def _get_current_scope(cls) -> Optional[Scope]: + """ + Returns the current scope without creating a new one. Internal use only. 
+        """
+        return _current_scope.get()
+
+    @classmethod
+    def set_current_scope(cls, new_current_scope: Scope) -> None:
         """
         .. versionadded:: 2.0.0

@@ -292,14 +260,13 @@ def set_current_scope(cls, new_current_scope):
         _current_scope.set(new_current_scope)

     @classmethod
-    def get_isolation_scope(cls):
-        # type: () -> Scope
+    def get_isolation_scope(cls) -> Scope:
         """
         .. versionadded:: 2.0.0

         Returns the isolation scope.
         """
-        isolation_scope = _isolation_scope.get()
+        isolation_scope = cls._get_isolation_scope()
         if isolation_scope is None:
             isolation_scope = Scope(ty=ScopeType.ISOLATION)
             _isolation_scope.set(isolation_scope)
@@ -307,8 +274,14 @@ def get_isolation_scope(cls):
         return isolation_scope

     @classmethod
-    def set_isolation_scope(cls, new_isolation_scope):
-        # type: (Scope) -> None
+    def _get_isolation_scope(cls) -> Optional[Scope]:
+        """
+        Returns the isolation scope without creating a new one. Internal use only.
+        """
+        return _isolation_scope.get()
+
+    @classmethod
+    def set_isolation_scope(cls, new_isolation_scope: Scope) -> None:
         """
         .. versionadded:: 2.0.0

@@ -318,8 +291,7 @@ def set_isolation_scope(cls, new_isolation_scope):
         _isolation_scope.set(new_isolation_scope)

     @classmethod
-    def get_global_scope(cls):
-        # type: () -> Scope
+    def get_global_scope(cls) -> Scope:
         """
         .. versionadded:: 2.0.0

@@ -332,8 +304,7 @@ def get_global_scope(cls):
         return _global_scope

     @classmethod
-    def last_event_id(cls):
-        # type: () -> Optional[str]
+    def last_event_id(cls) -> Optional[str]:
         """
         .. versionadded:: 2.2.0

@@ -348,8 +319,11 @@ def last_event_id(cls):
         """
         return cls.get_isolation_scope()._last_event_id

-    def _merge_scopes(self, additional_scope=None, additional_scope_kwargs=None):
-        # type: (Optional[Scope], Optional[Dict[str, Any]]) -> Scope
+    def _merge_scopes(
+        self,
+        additional_scope: Optional[Scope] = None,
+        additional_scope_kwargs: Optional[Dict[str, Any]] = None,
+    ) -> Self:
         """
         Merges global, isolation and current scope into a new scope and
         adds the given additional scope or additional scope kwargs to it.
@@ -357,16 +331,17 @@ def _merge_scopes(self, additional_scope=None, additional_scope_kwargs=None):
         if additional_scope and additional_scope_kwargs:
             raise TypeError("cannot provide scope and kwargs")

-        final_scope = copy(_global_scope) if _global_scope is not None else Scope()
+        final_scope = self.__class__()
         final_scope._type = ScopeType.MERGED

-        isolation_scope = _isolation_scope.get()
-        if isolation_scope is not None:
-            final_scope.update_from_scope(isolation_scope)
+        global_scope = self.get_global_scope()
+        final_scope.update_from_scope(global_scope)
+
+        isolation_scope = self.get_isolation_scope()
+        final_scope.update_from_scope(isolation_scope)

-        current_scope = _current_scope.get()
-        if current_scope is not None:
-            final_scope.update_from_scope(current_scope)
+        current_scope = self.get_current_scope()
+        final_scope.update_from_scope(current_scope)

         if self != current_scope and self != isolation_scope:
             final_scope.update_from_scope(self)
@@ -383,8 +358,7 @@ def _merge_scopes(self, additional_scope=None, additional_scope_kwargs=None):
         return final_scope

     @classmethod
-    def get_client(cls):
-        # type: () -> sentry_sdk.client.BaseClient
+    def get_client(cls) -> sentry_sdk.client.BaseClient:
         """
         .. versionadded:: 2.0.0

@@ -392,7 +366,7 @@ def get_client(cls):
         This checks the current scope, the isolation scope and the global scope for a client.
         If no client is available a :py:class:`sentry_sdk.client.NonRecordingClient` is returned.
""" - current_scope = _current_scope.get() + current_scope = cls.get_current_scope() try: client = current_scope.client except AttributeError: @@ -401,7 +375,7 @@ def get_client(cls): if client is not None and client.is_active(): return client - isolation_scope = _isolation_scope.get() + isolation_scope = cls.get_isolation_scope() try: client = isolation_scope.client except AttributeError: @@ -410,18 +384,18 @@ def get_client(cls): if client is not None and client.is_active(): return client - try: - client = _global_scope.client # type: ignore - except AttributeError: - client = None + if _global_scope: + try: + client = _global_scope.client + except AttributeError: + client = None if client is not None and client.is_active(): return client return NonRecordingClient() - def set_client(self, client=None): - # type: (Optional[sentry_sdk.client.BaseClient]) -> None + def set_client(self, client: Optional[sentry_sdk.client.BaseClient] = None) -> None: """ .. versionadded:: 2.0.0 @@ -433,8 +407,7 @@ def set_client(self, client=None): """ self.client = client if client is not None else NonRecordingClient() - def fork(self): - # type: () -> Scope + def fork(self) -> Self: """ .. versionadded:: 2.0.0 @@ -443,8 +416,7 @@ def fork(self): forked_scope = copy(self) return forked_scope - def _load_trace_data_from_env(self): - # type: () -> Optional[Dict[str, str]] + def _load_trace_data_from_env(self) -> Optional[Dict[str, str]]: """ Load Sentry trace id and baggage from environment variables. Can be disabled by setting SENTRY_USE_ENVIRONMENT to "false". @@ -470,15 +442,15 @@ def _load_trace_data_from_env(self): return incoming_trace_information or None - def set_new_propagation_context(self): - # type: () -> None + def set_new_propagation_context(self) -> None: """ Creates a new propagation context and sets it as `_propagation_context`. Overwriting existing one. """ self._propagation_context = PropagationContext() - def generate_propagation_context(self, incoming_data=None): - # type: (Optional[Dict[str, str]]) -> None + def generate_propagation_context( + self, incoming_data: Optional[dict[str, str]] = None + ) -> None: """ Makes sure the propagation context is set on the scope. If there is `incoming_data` overwrite existing propagation context. @@ -493,25 +465,14 @@ def generate_propagation_context(self, incoming_data=None): if self._propagation_context is None: self.set_new_propagation_context() - def get_dynamic_sampling_context(self): - # type: () -> Optional[Dict[str, str]] + def get_dynamic_sampling_context(self) -> Optional[Dict[str, str]]: """ - Returns the Dynamic Sampling Context from the Propagation Context. - If not existing, creates a new one. + Returns the Dynamic Sampling Context from the baggage or populates one. """ - if self._propagation_context is None: - return None - baggage = self.get_baggage() - if baggage is not None: - self._propagation_context.dynamic_sampling_context = ( - baggage.dynamic_sampling_context() - ) - - return self._propagation_context.dynamic_sampling_context + return baggage.dynamic_sampling_context() if baggage else None - def get_traceparent(self, *args, **kwargs): - # type: (Any, Any) -> Optional[str] + def get_traceparent(self, *args: Any, **kwargs: Any) -> Optional[str]: """ Returns the Sentry "sentry-trace" header (aka the traceparent) from the currently active span or the scopes Propagation Context. 
@@ -519,74 +480,67 @@ def get_traceparent(self, *args, **kwargs): client = self.get_client() # If we have an active span, return traceparent from there - if has_tracing_enabled(client.options) and self.span is not None: + if ( + has_tracing_enabled(client.options) + and self.span is not None + and self.span.is_valid + ): return self.span.to_traceparent() # If this scope has a propagation context, return traceparent from there if self._propagation_context is not None: - traceparent = "%s-%s" % ( - self._propagation_context.trace_id, - self._propagation_context.span_id, - ) - return traceparent + return self._propagation_context.to_traceparent() # Fall back to isolation scope's traceparent. It always has one return self.get_isolation_scope().get_traceparent() - def get_baggage(self, *args, **kwargs): - # type: (Any, Any) -> Optional[Baggage] + def get_baggage(self, *args: Any, **kwargs: Any) -> Optional[Baggage]: """ Returns the Sentry "baggage" header containing trace information from the currently active span or the scopes Propagation Context. + If not existing, creates a new one. """ client = self.get_client() # If we have an active span, return baggage from there - if has_tracing_enabled(client.options) and self.span is not None: + if ( + has_tracing_enabled(client.options) + and self.span is not None + and self.span.is_valid + ): return self.span.to_baggage() # If this scope has a propagation context, return baggage from there + # populate a fresh one if it doesn't exist if self._propagation_context is not None: - dynamic_sampling_context = ( - self._propagation_context.dynamic_sampling_context - ) - if dynamic_sampling_context is None: - return Baggage.from_options(self) - else: - return Baggage(dynamic_sampling_context) + if self._propagation_context.baggage is None: + self._propagation_context.baggage = Baggage.from_options(self) + return self._propagation_context.baggage # Fall back to isolation scope's baggage. It always has one return self.get_isolation_scope().get_baggage() - def get_trace_context(self): - # type: () -> Any + def get_trace_context(self) -> Any: """ Returns the Sentry "trace" context from the Propagation Context. """ if self._propagation_context is None: return None - trace_context = { + trace_context: Dict[str, Any] = { "trace_id": self._propagation_context.trace_id, "span_id": self._propagation_context.span_id, "parent_span_id": self._propagation_context.parent_span_id, "dynamic_sampling_context": self.get_dynamic_sampling_context(), - } # type: Dict[str, Any] + } return trace_context - def trace_propagation_meta(self, *args, **kwargs): - # type: (*Any, **Any) -> str + def trace_propagation_meta(self, *args: Any, **kwargs: Any) -> str: """ Return meta tags which should be injected into HTML templates to allow propagation of trace information. """ - span = kwargs.pop("span", None) - if span is not None: - logger.warning( - "The parameter `span` in trace_propagation_meta() is deprecated and will be removed in the future." - ) - meta = "" sentry_trace = self.get_traceparent() @@ -605,8 +559,7 @@ def trace_propagation_meta(self, *args, **kwargs): return meta - def iter_headers(self): - # type: () -> Iterator[Tuple[str, str]] + def iter_headers(self) -> Iterator[Tuple[str, str]]: """ Creates a generator which returns the `sentry-trace` and `baggage` headers from the Propagation Context. 
""" @@ -615,13 +568,13 @@ def iter_headers(self): if traceparent is not None: yield SENTRY_TRACE_HEADER_NAME, traceparent - dsc = self.get_dynamic_sampling_context() - if dsc is not None: - baggage = Baggage(dsc).serialize() - yield BAGGAGE_HEADER_NAME, baggage + baggage = self.get_baggage() + if baggage is not None: + yield BAGGAGE_HEADER_NAME, baggage.serialize() - def iter_trace_propagation_headers(self, *args, **kwargs): - # type: (Any, Any) -> Generator[Tuple[str, str], None, None] + def iter_trace_propagation_headers( + self, *args: Any, **kwargs: Any + ) -> Generator[Tuple[str, str], None, None]: """ Return HTTP headers which allow propagation of trace data. @@ -629,18 +582,11 @@ def iter_trace_propagation_headers(self, *args, **kwargs): If no span is given, the trace data is taken from the scope. """ client = self.get_client() - if not client.options.get("propagate_traces"): - warnings.warn( - "The `propagate_traces` parameter is deprecated. Please use `trace_propagation_targets` instead.", - DeprecationWarning, - stacklevel=2, - ) - return span = kwargs.pop("span", None) span = span or self.span - if has_tracing_enabled(client.options) and span is not None: + if has_tracing_enabled(client.options) and span is not None and span.is_valid: for header in span.iter_headers(): yield header else: @@ -662,8 +608,7 @@ def iter_trace_propagation_headers(self, *args, **kwargs): for header in isolation_scope.iter_headers(): yield header - def get_active_propagation_context(self): - # type: () -> Optional[PropagationContext] + def get_active_propagation_context(self) -> Optional[PropagationContext]: if self._propagation_context is not None: return self._propagation_context @@ -677,54 +622,35 @@ def get_active_propagation_context(self): return None - def clear(self): - # type: () -> None + def clear(self) -> None: """Clears the entire scope.""" - self._level = None # type: Optional[LogLevelStr] - self._fingerprint = None # type: Optional[List[str]] - self._transaction = None # type: Optional[str] - self._transaction_info = {} # type: MutableMapping[str, str] - self._user = None # type: Optional[Dict[str, Any]] + self._level: Optional[LogLevelStr] = None + self._fingerprint: Optional[List[str]] = None + self._transaction: Optional[str] = None + self._transaction_info: MutableMapping[str, str] = {} + self._user: Optional[Dict[str, Any]] = None - self._tags = {} # type: Dict[str, Any] - self._contexts = {} # type: Dict[str, Dict[str, Any]] - self._extras = {} # type: MutableMapping[str, Any] - self._attachments = [] # type: List[Attachment] + self._tags: Dict[str, Any] = {} + self._contexts: Dict[str, Dict[str, Any]] = {} + self._extras: MutableMapping[str, Any] = {} + self._attachments: List[Attachment] = [] self.clear_breadcrumbs() - self._should_capture = True # type: bool + self._should_capture: bool = True - self._span = None # type: Optional[Span] - self._session = None # type: Optional[Session] - self._force_auto_session_tracking = None # type: Optional[bool] + self._span: Optional[Span] = None + self._session: Optional[Session] = None + self._force_auto_session_tracking: Optional[bool] = None - self._profile = None # type: Optional[Profile] + self._profile: Optional[Profile] = None self._propagation_context = None # self._last_event_id is only applicable to isolation scopes - self._last_event_id = None # type: Optional[str] - self._flags = None # type: Optional[FlagBuffer] - - @_attr_setter - def level(self, value): - # type: (LogLevelStr) -> None - """ - When set this overrides the level. 
+ self._last_event_id: Optional[str] = None + self._flags: Optional[FlagBuffer] = None - .. deprecated:: 1.0.0 - Use :func:`set_level` instead. - - :param value: The level to set. - """ - logger.warning( - "Deprecated: use .set_level() instead. This will be removed in the future." - ) - - self._level = value - - def set_level(self, value): - # type: (LogLevelStr) -> None + def set_level(self, value: LogLevelStr) -> None: """ Sets the level for the scope. @@ -733,80 +659,39 @@ def set_level(self, value): self._level = value @_attr_setter - def fingerprint(self, value): - # type: (Optional[List[str]]) -> None + def fingerprint(self, value: Optional[List[str]]) -> None: """When set this overrides the default fingerprint.""" self._fingerprint = value @property - def transaction(self): - # type: () -> Any - # would be type: () -> Optional[Transaction], see https://github.com/python/mypy/issues/3004 - """Return the transaction (root span) in the scope, if any.""" - - # there is no span/transaction on the scope + def root_span(self) -> Optional[Span]: + """Return the root span in the scope, if any.""" if self._span is None: return None - # there is an orphan span on the scope - if self._span.containing_transaction is None: - return None - - # there is either a transaction (which is its own containing - # transaction) or a non-orphan span on the scope - return self._span.containing_transaction - - @transaction.setter - def transaction(self, value): - # type: (Any) -> None - # would be type: (Optional[str]) -> None, see https://github.com/python/mypy/issues/3004 - """When set this forces a specific transaction name to be set. - - Deprecated: use set_transaction_name instead.""" - - # XXX: the docstring above is misleading. The implementation of - # apply_to_event prefers an existing value of event.transaction over - # anything set in the scope. - # XXX: note that with the introduction of the Scope.transaction getter, - # there is a semantic and type mismatch between getter and setter. The - # getter returns a Transaction, the setter sets a transaction name. - # Without breaking version compatibility, we could make the setter set a - # transaction name or transaction (self._span) depending on the type of - # the value argument. - - logger.warning( - "Assigning to scope.transaction directly is deprecated: use scope.set_transaction_name() instead." - ) - self._transaction = value - if self._span and self._span.containing_transaction: - self._span.containing_transaction.name = value + return self._span.root_span - def set_transaction_name(self, name, source=None): - # type: (str, Optional[str]) -> None + def set_transaction_name(self, name: str, source: Optional[str] = None) -> None: """Set the transaction name and optionally the transaction source.""" self._transaction = name - if self._span and self._span.containing_transaction: - self._span.containing_transaction.name = name + if self._span and self._span.root_span: + self._span.root_span.name = name if source: - self._span.containing_transaction.source = source + self._span.root_span.source = source if source: self._transaction_info["source"] = source - @_attr_setter - def user(self, value): - # type: (Optional[Dict[str, Any]]) -> None - """When set a specific user is bound to the scope. 
Deprecated in favor of set_user.""" - warnings.warn( - "The `Scope.user` setter is deprecated in favor of `Scope.set_user()`.", - DeprecationWarning, - stacklevel=2, - ) - self.set_user(value) + @property + def transaction_name(self) -> Optional[str]: + return self._transaction - def set_user(self, value): - # type: (Optional[Dict[str, Any]]) -> None + @property + def transaction_source(self) -> Optional[str]: + return self._transaction_info.get("source") + + def set_user(self, value: Optional[Dict[str, Any]]) -> None: """Sets a user for the scope.""" self._user = value session = self.get_isolation_scope()._session @@ -814,37 +699,20 @@ def set_user(self, value): session.update(user=value) @property - def span(self): - # type: () -> Optional[Span] - """Get/set current tracing span or transaction.""" + def span(self) -> Optional[Span]: + """Get current tracing span.""" return self._span - @span.setter - def span(self, span): - # type: (Optional[Span]) -> None - self._span = span - # XXX: this differs from the implementation in JS, there Scope.setSpan - # does not set Scope._transactionName. - if isinstance(span, Transaction): - transaction = span - if transaction.name: - self._transaction = transaction.name - if transaction.source: - self._transaction_info["source"] = transaction.source - @property - def profile(self): - # type: () -> Optional[Profile] + def profile(self) -> Optional[Profile]: return self._profile @profile.setter - def profile(self, profile): - # type: (Optional[Profile]) -> None + def profile(self, profile: Optional[Profile]) -> None: self._profile = profile - def set_tag(self, key, value): - # type: (str, Any) -> None + def set_tag(self, key: str, value: Any) -> None: """ Sets a tag for a key to a specific value. @@ -854,8 +722,7 @@ def set_tag(self, key, value): """ self._tags[key] = value - def set_tags(self, tags): - # type: (Mapping[str, object]) -> None + def set_tags(self, tags: Mapping[str, object]) -> None: """Sets multiple tags at once. This method updates multiple tags at once. The tags are passed as a dictionary @@ -873,8 +740,7 @@ def set_tags(self, tags): """ self._tags.update(tags) - def remove_tag(self, key): - # type: (str) -> None + def remove_tag(self, key: str) -> None: """ Removes a specific tag. @@ -884,53 +750,46 @@ def remove_tag(self, key): def set_context( self, - key, # type: str - value, # type: Dict[str, Any] - ): - # type: (...) -> None + key: str, + value: Dict[str, Any], + ) -> None: """ Binds a context at a certain key to a specific value. """ self._contexts[key] = value def remove_context( - self, key # type: str - ): - # type: (...) -> None + self, + key: str, + ) -> None: """Removes a context.""" self._contexts.pop(key, None) def set_extra( self, - key, # type: str - value, # type: Any - ): - # type: (...) -> None + key: str, + value: Any, + ) -> None: """Sets an extra key to a specific value.""" self._extras[key] = value - def remove_extra( - self, key # type: str - ): - # type: (...) 
-> None + def remove_extra(self, key: str) -> None: """Removes a specific extra key.""" self._extras.pop(key, None) - def clear_breadcrumbs(self): - # type: () -> None + def clear_breadcrumbs(self) -> None: """Clears breadcrumb buffer.""" - self._breadcrumbs = deque() # type: Deque[Breadcrumb] + self._breadcrumbs: Deque[Breadcrumb] = deque() self._n_breadcrumbs_truncated = 0 def add_attachment( self, - bytes=None, # type: Union[None, bytes, Callable[[], bytes]] - filename=None, # type: Optional[str] - path=None, # type: Optional[str] - content_type=None, # type: Optional[str] - add_to_transactions=False, # type: bool - ): - # type: (...) -> None + bytes: Union[None, bytes, Callable[[], bytes]] = None, + filename: Optional[str] = None, + path: Optional[str] = None, + content_type: Optional[str] = None, + add_to_transactions: bool = False, + ) -> None: """Adds an attachment to future events sent from this scope. The parameters are the same as for the :py:class:`sentry_sdk.attachments.Attachment` constructor. @@ -945,8 +804,12 @@ def add_attachment( ) ) - def add_breadcrumb(self, crumb=None, hint=None, **kwargs): - # type: (Optional[Breadcrumb], Optional[BreadcrumbHint], Any) -> None + def add_breadcrumb( + self, + crumb: Optional[Breadcrumb] = None, + hint: Optional[BreadcrumbHint] = None, + **kwargs: Any, + ) -> None: """ Adds a breadcrumb. @@ -964,12 +827,12 @@ def add_breadcrumb(self, crumb=None, hint=None, **kwargs): before_breadcrumb = client.options.get("before_breadcrumb") max_breadcrumbs = client.options.get("max_breadcrumbs", DEFAULT_MAX_BREADCRUMBS) - crumb = dict(crumb or ()) # type: Breadcrumb + crumb: Breadcrumb = dict(crumb or ()) crumb.update(kwargs) if not crumb: return - hint = dict(hint or ()) # type: Hint + hint: Hint = dict(hint or ()) if crumb.get("timestamp") is None: crumb["timestamp"] = datetime.now(timezone.utc) @@ -990,198 +853,48 @@ def add_breadcrumb(self, crumb=None, hint=None, **kwargs): self._breadcrumbs.popleft() self._n_breadcrumbs_truncated += 1 - def start_transaction( - self, - transaction=None, - instrumenter=INSTRUMENTER.SENTRY, - custom_sampling_context=None, - **kwargs, - ): - # type: (Optional[Transaction], str, Optional[SamplingContext], Unpack[TransactionKwargs]) -> Union[Transaction, NoOpSpan] + def start_transaction(self, **kwargs: Any) -> Union[NoOpSpan, Span]: """ - Start and return a transaction. - - Start an existing transaction if given, otherwise create and start a new - transaction with kwargs. - - This is the entry point to manual tracing instrumentation. - - A tree structure can be built by adding child spans to the transaction, - and child spans to other spans. To start a new child span within the - transaction or any span, call the respective `.start_child()` method. - - Every child span must be finished before the transaction is finished, - otherwise the unfinished spans are discarded. - - When used as context managers, spans and transactions are automatically - finished at the end of the `with` block. If not using context managers, - call the `.finish()` method. - - When the transaction is finished, it will be sent to Sentry with all its - finished child spans. - - :param transaction: The transaction to start. If omitted, we create and - start a new transaction. - :param instrumenter: This parameter is meant for internal use only. It - will be removed in the next major version. - :param custom_sampling_context: The transaction's custom sampling context. 
-        :param kwargs: Optional keyword arguments to be passed to the Transaction
-            constructor. See :py:class:`sentry_sdk.tracing.Transaction` for
-            available arguments.
+        .. deprecated:: 3.0.0
+            This function is deprecated and will be removed in a future release.
+            Use :py:meth:`sentry_sdk.start_span` instead.
         """
-        kwargs.setdefault("scope", self)
-
-        client = self.get_client()
-
-        configuration_instrumenter = client.options["instrumenter"]
-
-        if instrumenter != configuration_instrumenter:
-            return NoOpSpan()
-
-        try_autostart_continuous_profiler()
-
-        custom_sampling_context = custom_sampling_context or {}
-
-        # kwargs at this point has type TransactionKwargs, since we have removed
-        # the client and custom_sampling_context from it.
-        transaction_kwargs = kwargs  # type: TransactionKwargs
-
-        # if we haven't been given a transaction, make one
-        if transaction is None:
-            transaction = Transaction(**transaction_kwargs)
-
-        # use traces_sample_rate, traces_sampler, and/or inheritance to make a
-        # sampling decision
-        sampling_context = {
-            "transaction_context": transaction.to_json(),
-            "parent_sampled": transaction.parent_sampled,
-        }
-        sampling_context.update(custom_sampling_context)
-        transaction._set_initial_sampling_decision(sampling_context=sampling_context)
-
-        # update the sample rate in the dsc
-        if transaction.sample_rate is not None:
-            propagation_context = self.get_active_propagation_context()
-            if propagation_context:
-                dsc = propagation_context.dynamic_sampling_context
-                if dsc is not None:
-                    dsc["sample_rate"] = str(transaction.sample_rate)
-            if transaction._baggage:
-                transaction._baggage.sentry_items["sample_rate"] = str(
-                    transaction.sample_rate
-                )
-
-        if transaction.sampled:
-            profile = Profile(
-                transaction.sampled, transaction._start_timestamp_monotonic_ns
-            )
-            profile._set_initial_sampling_decision(sampling_context=sampling_context)
-
-            transaction._profile = profile
-
-            transaction._continuous_profile = try_profile_lifecycle_trace_start()
-
-            # Typically, the profiler is set when the transaction is created. But when
-            # using the auto lifecycle, the profiler isn't running when the first
-            # transaction is started. So make sure we update the profiler id on it.
-            if transaction._continuous_profile is not None:
-                transaction.set_profiler_id(get_profiler_id())
-
-        # we don't bother to keep spans if we already know we're not going to
-        # send the transaction
-        max_spans = (client.options["_experiments"].get("max_spans")) or 1000
-        transaction.init_span_recorder(maxlen=max_spans)
-
-        return transaction
+        warnings.warn(
+            "The `start_transaction` method is deprecated, please use `sentry_sdk.start_span` instead.",
+            DeprecationWarning,
+            stacklevel=2,
+        )
+        return NoOpSpan(**kwargs)

-    def start_span(self, instrumenter=INSTRUMENTER.SENTRY, **kwargs):
-        # type: (str, Any) -> Span
+    def start_span(self, **kwargs: Any) -> Union[NoOpSpan, Span]:
         """
-        Start a span whose parent is the currently active span or transaction, if any.
+        Start a span whose parent is the currently active span, if any.

         The return value is a :py:class:`sentry_sdk.tracing.Span` instance,
         typically used as a context manager to start and stop timing in a
         `with` block.

-        Only spans contained in a transaction are sent to Sentry. Most
-        integrations start a transaction at the appropriate time, for example
-        for every incoming HTTP request. Use
-        :py:meth:`sentry_sdk.start_transaction` to start a new transaction when
-        one is not already in progress.
- For supported `**kwargs` see :py:class:`sentry_sdk.tracing.Span`. - - The instrumenter parameter is deprecated for user code, and it will - be removed in the next major version. Going forward, it should only - be used by the SDK itself. """ - if kwargs.get("description") is not None: - warnings.warn( - "The `description` parameter is deprecated. Please use `name` instead.", - DeprecationWarning, - stacklevel=2, - ) - - with new_scope(): - kwargs.setdefault("scope", self) - - client = self.get_client() - - configuration_instrumenter = client.options["instrumenter"] - - if instrumenter != configuration_instrumenter: - return NoOpSpan() - - # get current span or transaction - span = self.span or self.get_isolation_scope().span - - if span is None: - # New spans get the `trace_id` from the scope - if "trace_id" not in kwargs: - propagation_context = self.get_active_propagation_context() - if propagation_context is not None: - kwargs["trace_id"] = propagation_context.trace_id - - span = Span(**kwargs) - else: - # Children take `trace_id`` from the parent span. - span = span.start_child(**kwargs) - - return span + return NoOpSpan(**kwargs) + @contextmanager def continue_trace( - self, environ_or_headers, op=None, name=None, source=None, origin="manual" - ): - # type: (Dict[str, Any], Optional[str], Optional[str], Optional[str], str) -> Transaction + self, environ_or_headers: Dict[str, Any] + ) -> Generator[None, None, None]: """ - Sets the propagation context from environment or headers and returns a transaction. + Sets the propagation context from environment or headers to continue an incoming trace. """ self.generate_propagation_context(environ_or_headers) + yield - # When we generate the propagation context, the sample_rand value is set - # if missing or invalid (we use the original value if it's valid). - # We want the transaction to use the same sample_rand value. Due to duplicated - # propagation logic in the transaction, we pass it in to avoid recomputing it - # in the transaction. - # TYPE SAFETY: self.generate_propagation_context() ensures that self._propagation_context - # is not None. - sample_rand = typing.cast( - PropagationContext, self._propagation_context - )._sample_rand() - - transaction = Transaction.continue_from_headers( - normalize_incoming_data(environ_or_headers), - _sample_rand=sample_rand, - op=op, - origin=origin, - name=name, - source=source, - ) - - return transaction - - def capture_event(self, event, hint=None, scope=None, **scope_kwargs): - # type: (Event, Optional[Hint], Optional[Scope], Any) -> Optional[str] + def capture_event( + self, + event: Event, + hint: Optional[Hint] = None, + scope: Optional[Scope] = None, + **scope_kwargs: Any, + ) -> Optional[str]: """ Captures an event. @@ -1212,8 +925,13 @@ def capture_event(self, event, hint=None, scope=None, **scope_kwargs): return event_id - def capture_message(self, message, level=None, scope=None, **scope_kwargs): - # type: (str, Optional[LogLevelStr], Optional[Scope], Any) -> Optional[str] + def capture_message( + self, + message: str, + level: Optional[LogLevelStr] = None, + scope: Optional[Scope] = None, + **scope_kwargs: Any, + ) -> Optional[str]: """ Captures a message. 
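The hunk below shows capture_message building a bare event and delegating to capture_event, so the two top-level calls in this sketch should be roughly equivalent (the literal event dict mirrors the fields visible in the hunk):

    import sentry_sdk

    sentry_sdk.capture_message("cache warmup failed", level="warning")
    # ...is roughly:
    sentry_sdk.capture_event({"message": "cache warmup failed", "level": "warning"})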
@@ -1236,15 +954,19 @@ def capture_message(self, message, level=None, scope=None, **scope_kwargs):
         if level is None:
             level = "info"

-        event = {
+        event: Event = {
             "message": message,
             "level": level,
-        }  # type: Event
+        }

         return self.capture_event(event, scope=scope, **scope_kwargs)

-    def capture_exception(self, error=None, scope=None, **scope_kwargs):
-        # type: (Optional[Union[BaseException, ExcInfo]], Optional[Scope], Any) -> Optional[str]
+    def capture_exception(
+        self,
+        error: Optional[Union[BaseException, ExcInfo]] = None,
+        scope: Optional[Scope] = None,
+        **scope_kwargs: Any,
+    ) -> Optional[str]:
         """Captures an exception.

         :param error: An exception to capture. If `None`, `sys.exc_info()` will be used.
@@ -1277,8 +999,7 @@ def capture_exception(self, error=None, scope=None, **scope_kwargs):

         return None

-    def start_session(self, *args, **kwargs):
-        # type: (*Any, **Any) -> None
+    def start_session(self, *args: Any, **kwargs: Any) -> None:
         """Starts a new session."""
         session_mode = kwargs.pop("session_mode", "application")

@@ -1292,8 +1013,7 @@ def start_session(self, *args, **kwargs):
             session_mode=session_mode,
         )

-    def end_session(self, *args, **kwargs):
-        # type: (*Any, **Any) -> None
+    def end_session(self, *args: Any, **kwargs: Any) -> None:
         """Ends the current session if there is one."""
         session = self._session
         self._session = None
@@ -1302,8 +1022,7 @@ def end_session(self, *args, **kwargs):
             session.close()
             self.get_client().capture_session(session)

-    def stop_auto_session_tracking(self, *args, **kwargs):
-        # type: (*Any, **Any) -> None
+    def stop_auto_session_tracking(self, *args: Any, **kwargs: Any) -> None:
         """Stops automatic session tracking.

         This temporarily disables session tracking for the current scope when called.
@@ -1312,18 +1031,14 @@ def stop_auto_session_tracking(self, *args, **kwargs):
         self.end_session()
         self._force_auto_session_tracking = False

-    def resume_auto_session_tracking(self):
-        # type: (...) -> None
+    def resume_auto_session_tracking(self) -> None:
         """Resumes automatic session tracking for the current scope if disabled earlier.
         This requires that automatic session tracking is generally enabled.
         """
         self._force_auto_session_tracking = None

-    def add_event_processor(
-        self, func  # type: EventProcessor
-    ):
-        # type: (...) -> None
+    def add_event_processor(self, func: EventProcessor) -> None:
         """Register a scope local event processor on the scope.

         :param func: This function behaves like `before_send`.
@@ -1339,10 +1054,9 @@ def add_event_processor(

     def add_error_processor(
         self,
-        func,  # type: ErrorProcessor
-        cls=None,  # type: Optional[Type[BaseException]]
-    ):
-        # type: (...) -> None
+        func: ErrorProcessor,
+        cls: Optional[Type[BaseException]] = None,
+    ) -> None:
         """Register a scope local error processor on the scope.

         :param func: A callback that works similar to an event processor but is invoked with the original exception info triple as second argument.

@@ -1353,8 +1067,7 @@ def add_error_processor(
         cls_ = cls  # For mypy.
real_func = func - def func(event, exc_info): - # type: (Event, ExcInfo) -> Optional[Event] + def wrapped_func(event: Event, exc_info: ExcInfo) -> Optional[Event]: try: is_inst = isinstance(exc_info[1], cls_) except Exception: @@ -1363,15 +1076,17 @@ def func(event, exc_info): return real_func(event, exc_info) return event - self._error_processors.append(func) + self._error_processors.append(wrapped_func) - def _apply_level_to_event(self, event, hint, options): - # type: (Event, Hint, Optional[Dict[str, Any]]) -> None + def _apply_level_to_event( + self, event: Event, hint: Hint, options: Optional[Dict[str, Any]] + ) -> None: if self._level is not None: event["level"] = self._level - def _apply_breadcrumbs_to_event(self, event, hint, options): - # type: (Event, Hint, Optional[Dict[str, Any]]) -> None + def _apply_breadcrumbs_to_event( + self, event: Event, hint: Hint, options: Optional[Dict[str, Any]] + ) -> None: event.setdefault("breadcrumbs", {}) # This check is just for mypy - @@ -1393,38 +1108,47 @@ def _apply_breadcrumbs_to_event(self, event, hint, options): logger.debug("Error when sorting breadcrumbs", exc_info=err) pass - def _apply_user_to_event(self, event, hint, options): - # type: (Event, Hint, Optional[Dict[str, Any]]) -> None + def _apply_user_to_event( + self, event: Event, hint: Hint, options: Optional[Dict[str, Any]] + ) -> None: if event.get("user") is None and self._user is not None: event["user"] = self._user - def _apply_transaction_name_to_event(self, event, hint, options): - # type: (Event, Hint, Optional[Dict[str, Any]]) -> None + def _apply_transaction_name_to_event( + self, event: Event, hint: Hint, options: Optional[Dict[str, Any]] + ) -> None: if event.get("transaction") is None and self._transaction is not None: event["transaction"] = self._transaction - def _apply_transaction_info_to_event(self, event, hint, options): - # type: (Event, Hint, Optional[Dict[str, Any]]) -> None + def _apply_transaction_info_to_event( + self, event: Event, hint: Hint, options: Optional[Dict[str, Any]] + ) -> None: if event.get("transaction_info") is None and self._transaction_info is not None: event["transaction_info"] = self._transaction_info - def _apply_fingerprint_to_event(self, event, hint, options): - # type: (Event, Hint, Optional[Dict[str, Any]]) -> None + def _apply_fingerprint_to_event( + self, event: Event, hint: Hint, options: Optional[Dict[str, Any]] + ) -> None: if event.get("fingerprint") is None and self._fingerprint is not None: event["fingerprint"] = self._fingerprint - def _apply_extra_to_event(self, event, hint, options): - # type: (Event, Hint, Optional[Dict[str, Any]]) -> None + def _apply_extra_to_event( + self, event: Event, hint: Hint, options: Optional[Dict[str, Any]] + ) -> None: if self._extras: event.setdefault("extra", {}).update(self._extras) - def _apply_tags_to_event(self, event, hint, options): - # type: (Event, Hint, Optional[Dict[str, Any]]) -> None + def _apply_tags_to_event( + self, event: Event, hint: Hint, options: Optional[Dict[str, Any]] + ) -> None: if self._tags: - event.setdefault("tags", {}).update(self._tags) + event.setdefault("tags", {}).update( + {k: safe_str(v) for k, v in self._tags.items()} + ) - def _apply_contexts_to_event(self, event, hint, options): - # type: (Event, Hint, Optional[Dict[str, Any]]) -> None + def _apply_contexts_to_event( + self, event: Event, hint: Hint, options: Optional[Dict[str, Any]] + ) -> None: if self._contexts: event.setdefault("contexts", {}).update(self._contexts) @@ -1432,26 +1156,30 @@ def 
_apply_contexts_to_event(self, event, hint, options): # Add "trace" context if contexts.get("trace") is None: - if has_tracing_enabled(options) and self._span is not None: + if ( + options is not None + and has_tracing_enabled(options) + and self._span is not None + and self._span.is_valid + ): contexts["trace"] = self._span.get_trace_context() else: contexts["trace"] = self.get_trace_context() - def _apply_flags_to_event(self, event, hint, options): - # type: (Event, Hint, Optional[Dict[str, Any]]) -> None + def _apply_flags_to_event( + self, event: Event, hint: Hint, options: Optional[Dict[str, Any]] + ) -> None: flags = self.flags.get() if len(flags) > 0: event.setdefault("contexts", {}).setdefault("flags", {}).update( {"values": flags} ) - def _drop(self, cause, ty): - # type: (Any, str) -> Optional[Any] + def _drop(self, cause: Any, ty: str) -> Optional[Any]: logger.info("%s (%s) dropped event", ty, cause) return None - def run_error_processors(self, event, hint): - # type: (Event, Hint) -> Optional[Event] + def run_error_processors(self, event: Event, hint: Hint) -> Optional[Event]: """ Runs the error processors on the event and returns the modified event. """ @@ -1472,8 +1200,7 @@ def run_error_processors(self, event, hint): return event - def run_event_processors(self, event, hint): - # type: (Event, Hint) -> Optional[Event] + def run_event_processors(self, event: Event, hint: Hint) -> Optional[Event]: """ Runs the event processors on the event and returns the modified event. """ @@ -1482,8 +1209,8 @@ def run_event_processors(self, event, hint): if not is_check_in: # Get scopes without creating them to prevent infinite recursion - isolation_scope = _isolation_scope.get() - current_scope = _current_scope.get() + isolation_scope = self._get_isolation_scope() + current_scope = self._get_current_scope() event_processors = chain( global_event_processors, @@ -1493,7 +1220,7 @@ def run_event_processors(self, event, hint): ) for event_processor in event_processors: - new_event = event + new_event: Optional[Event] = event with capture_internal_exceptions(): new_event = event_processor(event, hint) if new_event is None: @@ -1505,11 +1232,10 @@ def run_event_processors(self, event, hint): @_disable_capture def apply_to_event( self, - event, # type: Event - hint, # type: Hint - options=None, # type: Optional[Dict[str, Any]] - ): - # type: (...) -> Optional[Event] + event: Event, + hint: Hint, + options: Optional[Dict[str, Any]] = None, + ) -> Optional[Event]: """Applies the information contained on the scope to the given event.""" ty = event.get("type") is_transaction = ty == "transaction" @@ -1555,8 +1281,7 @@ def apply_to_event( return event - def update_from_scope(self, scope): - # type: (Scope) -> None + def update_from_scope(self, scope: Scope) -> None: """Update the scope with another scope's data.""" if scope._level is not None: self._level = scope._level @@ -1599,14 +1324,13 @@ def update_from_scope(self, scope): def update_from_kwargs( self, - user=None, # type: Optional[Any] - level=None, # type: Optional[LogLevelStr] - extras=None, # type: Optional[Dict[str, Any]] - contexts=None, # type: Optional[Dict[str, Dict[str, Any]]] - tags=None, # type: Optional[Dict[str, str]] - fingerprint=None, # type: Optional[List[str]] - ): - # type: (...) 
-> None + user: Optional[Any] = None, + level: Optional[LogLevelStr] = None, + extras: Optional[Dict[str, Any]] = None, + contexts: Optional[Dict[str, Dict[str, Any]]] = None, + tags: Optional[Dict[str, str]] = None, + fingerprint: Optional[List[str]] = None, + ) -> None: """Update the scope's attributes.""" if level is not None: self._level = level @@ -1621,8 +1345,7 @@ def update_from_kwargs( if fingerprint is not None: self._fingerprint = fingerprint - def __repr__(self): - # type: () -> str + def __repr__(self) -> str: return "<%s id=%s name=%s type=%s>" % ( self.__class__.__name__, hex(id(self)), @@ -1631,8 +1354,7 @@ def __repr__(self): ) @property - def flags(self): - # type: () -> FlagBuffer + def flags(self) -> FlagBuffer: if self._flags is None: max_flags = ( self.get_client().options["_experiments"].get("max_flags") @@ -1643,8 +1365,7 @@ def flags(self): @contextmanager -def new_scope(): - # type: () -> Generator[Scope, None, None] +def new_scope() -> Generator[Scope, None, None]: """ .. versionadded:: 2.0.0 @@ -1681,8 +1402,7 @@ def new_scope(): @contextmanager -def use_scope(scope): - # type: (Scope) -> Generator[Scope, None, None] +def use_scope(scope: Scope) -> Generator[Scope, None, None]: """ .. versionadded:: 2.0.0 @@ -1719,8 +1439,7 @@ def use_scope(scope): @contextmanager -def isolation_scope(): - # type: () -> Generator[Scope, None, None] +def isolation_scope() -> Generator[Scope, None, None]: """ .. versionadded:: 2.0.0 @@ -1768,8 +1487,7 @@ def isolation_scope(): @contextmanager -def use_isolation_scope(isolation_scope): - # type: (Scope) -> Generator[Scope, None, None] +def use_isolation_scope(isolation_scope: Scope) -> Generator[Scope, None, None]: """ .. versionadded:: 2.0.0 @@ -1814,14 +1532,10 @@ def use_isolation_scope(isolation_scope): capture_internal_exception(sys.exc_info()) -def should_send_default_pii(): - # type: () -> bool +def should_send_default_pii() -> bool: """Shortcut for `Scope.get_client().should_send_default_pii()`.""" return Scope.get_client().should_send_default_pii() # Circular imports from sentry_sdk.client import NonRecordingClient - -if TYPE_CHECKING: - import sentry_sdk.client diff --git a/sentry_sdk/scrubber.py b/sentry_sdk/scrubber.py index b0576c7e95..a8fcd9b8ba 100644 --- a/sentry_sdk/scrubber.py +++ b/sentry_sdk/scrubber.py @@ -1,14 +1,15 @@ +from __future__ import annotations from sentry_sdk.utils import ( capture_internal_exceptions, AnnotatedValue, iter_event_frames, ) -from typing import TYPE_CHECKING, cast, List, Dict +from typing import TYPE_CHECKING if TYPE_CHECKING: + from typing import List, Optional from sentry_sdk._types import Event - from typing import Optional DEFAULT_DENYLIST = [ @@ -60,9 +61,12 @@ class EventScrubber: def __init__( - self, denylist=None, recursive=False, send_default_pii=False, pii_denylist=None - ): - # type: (Optional[List[str]], bool, bool, Optional[List[str]]) -> None + self, + denylist: Optional[List[str]] = None, + recursive: bool = False, + send_default_pii: bool = False, + pii_denylist: Optional[List[str]] = None, + ) -> None: """ A scrubber that goes through the event payload and removes sensitive data configured through denylists. @@ -82,8 +86,7 @@ def __init__( self.denylist = [x.lower() for x in self.denylist] self.recursive = recursive - def scrub_list(self, lst): - # type: (object) -> None + def scrub_list(self, lst: object) -> None: """ If a list is passed to this method, the method recursively searches the list and any nested lists for any dictionaries. 
The method calls scrub_dict on all dictionaries @@ -97,8 +100,7 @@ def scrub_list(self, lst): self.scrub_dict(v) # no-op unless v is a dict self.scrub_list(v) # no-op unless v is a list - def scrub_dict(self, d): - # type: (object) -> None + def scrub_dict(self, d: object) -> None: """ If a dictionary is passed to this method, the method scrubs the dictionary of any sensitive data. The method calls itself recursively on any nested dictionaries ( @@ -117,8 +119,7 @@ def scrub_dict(self, d): self.scrub_dict(v) # no-op unless v is a dict self.scrub_list(v) # no-op unless v is a list - def scrub_request(self, event): - # type: (Event) -> None + def scrub_request(self, event: Event) -> None: with capture_internal_exceptions(): if "request" in event: if "headers" in event["request"]: @@ -128,20 +129,17 @@ def scrub_request(self, event): if "data" in event["request"]: self.scrub_dict(event["request"]["data"]) - def scrub_extra(self, event): - # type: (Event) -> None + def scrub_extra(self, event: Event) -> None: with capture_internal_exceptions(): if "extra" in event: self.scrub_dict(event["extra"]) - def scrub_user(self, event): - # type: (Event) -> None + def scrub_user(self, event: Event) -> None: with capture_internal_exceptions(): if "user" in event: self.scrub_dict(event["user"]) - def scrub_breadcrumbs(self, event): - # type: (Event) -> None + def scrub_breadcrumbs(self, event: Event) -> None: with capture_internal_exceptions(): if "breadcrumbs" in event: if ( @@ -152,23 +150,21 @@ def scrub_breadcrumbs(self, event): if "data" in value: self.scrub_dict(value["data"]) - def scrub_frames(self, event): - # type: (Event) -> None + def scrub_frames(self, event: Event) -> None: with capture_internal_exceptions(): for frame in iter_event_frames(event): if "vars" in frame: self.scrub_dict(frame["vars"]) - def scrub_spans(self, event): - # type: (Event) -> None + def scrub_spans(self, event: Event) -> None: with capture_internal_exceptions(): if "spans" in event: - for span in cast(List[Dict[str, object]], event["spans"]): - if "data" in span: - self.scrub_dict(span["data"]) + if not isinstance(event["spans"], AnnotatedValue): + for span in event["spans"]: + if "data" in span: + self.scrub_dict(span["data"]) - def scrub_event(self, event): - # type: (Event) -> None + def scrub_event(self, event: Event) -> None: self.scrub_request(event) self.scrub_extra(event) self.scrub_user(event) diff --git a/sentry_sdk/serializer.py b/sentry_sdk/serializer.py index 04df9857bd..1ab83ba293 100644 --- a/sentry_sdk/serializer.py +++ b/sentry_sdk/serializer.py @@ -1,3 +1,4 @@ +from __future__ import annotations import sys import math from collections.abc import Mapping, Sequence, Set @@ -26,7 +27,7 @@ from typing import Type from typing import Union - from sentry_sdk._types import NotImplementedType + from sentry_sdk._types import NotImplementedType, Event Span = Dict[str, Any] @@ -38,16 +39,6 @@ serializable_str_types = (str, bytes, bytearray, memoryview) -# Maximum length of JSON-serialized event payloads that can be safely sent -# before the server may reject the event due to its size. This is not intended -# to reflect actual values defined server-side, but rather only be an upper -# bound for events sent by the SDK. -# -# Can be overwritten if wanting to send more bytes, e.g. with a custom server. -# When changing this, keep in mind that events may be a little bit larger than -# this value due to attached metadata, so keep the number conservative. 
-MAX_EVENT_BYTES = 10**6 - # Maximum depth and breadth of databags. Excess data will be trimmed. If # max_request_body_size is "always", request bodies won't be trimmed. MAX_DATABAG_DEPTH = 5 @@ -55,37 +46,32 @@ CYCLE_MARKER = "" -global_repr_processors = [] # type: List[ReprProcessor] +global_repr_processors: List[ReprProcessor] = [] -def add_global_repr_processor(processor): - # type: (ReprProcessor) -> None +def add_global_repr_processor(processor: ReprProcessor) -> None: global_repr_processors.append(processor) -sequence_types = [Sequence, Set] # type: List[type] +sequence_types: list[type] = [Sequence, Set] -def add_repr_sequence_type(ty): - # type: (type) -> None +def add_repr_sequence_type(ty: type) -> None: sequence_types.append(ty) class Memo: __slots__ = ("_ids", "_objs") - def __init__(self): - # type: () -> None - self._ids = {} # type: Dict[int, Any] - self._objs = [] # type: List[Any] + def __init__(self) -> None: + self._ids: Dict[int, Any] = {} + self._objs: List[Any] = [] - def memoize(self, obj): - # type: (Any) -> ContextManager[bool] + def memoize(self, obj: Any) -> ContextManager[bool]: self._objs.append(obj) return self - def __enter__(self): - # type: () -> bool + def __enter__(self) -> bool: obj = self._objs[-1] if id(obj) in self._ids: return True @@ -95,16 +81,14 @@ def __enter__(self): def __exit__( self, - ty, # type: Optional[Type[BaseException]] - value, # type: Optional[BaseException] - tb, # type: Optional[TracebackType] - ): - # type: (...) -> None + ty: Optional[Type[BaseException]], + value: Optional[BaseException], + tb: Optional[TracebackType], + ) -> None: self._ids.pop(id(self._objs.pop()), None) -def serialize(event, **kwargs): - # type: (Dict[str, Any], **Any) -> Dict[str, Any] +def serialize(event: Union[Dict[str, Any], Event], **kwargs: Any) -> Dict[str, Any]: """ A very smart serializer that takes a dict and emits a json-friendly dict. Currently used for serializing the final Event and also prematurely while fetching the stack @@ -125,18 +109,15 @@ def serialize(event, **kwargs): """ memo = Memo() - path = [] # type: List[Segment] - meta_stack = [] # type: List[Dict[str, Any]] + path: List[Segment] = [] + meta_stack: List[Dict[str, Any]] = [] - keep_request_bodies = ( - kwargs.pop("max_request_body_size", None) == "always" - ) # type: bool - max_value_length = kwargs.pop("max_value_length", None) # type: Optional[int] + keep_request_bodies: bool = kwargs.pop("max_request_body_size", None) == "always" + max_value_length: Optional[int] = kwargs.pop("max_value_length", None) is_vars = kwargs.pop("is_vars", False) - custom_repr = kwargs.pop("custom_repr", None) # type: Callable[..., Optional[str]] + custom_repr: Callable[..., Optional[str]] = kwargs.pop("custom_repr", None) - def _safe_repr_wrapper(value): - # type: (Any) -> str + def _safe_repr_wrapper(value: Any) -> str: try: repr_value = None if custom_repr is not None: @@ -145,8 +126,7 @@ def _safe_repr_wrapper(value): except Exception: return safe_repr(value) - def _annotate(**meta): - # type: (**Any) -> None + def _annotate(**meta: Any) -> None: while len(meta_stack) <= len(path): try: segment = path[len(meta_stack) - 1] @@ -158,8 +138,7 @@ def _annotate(**meta): meta_stack[-1].setdefault("", {}).update(meta) - def _is_databag(): - # type: () -> Optional[bool] + def _is_databag() -> Optional[bool]: """ A databag is any value that we need to trim. True for stuff like vars, request bodies, breadcrumbs and extra. 
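To make the trimming concrete, a sketch under the signature shown above: `extra` is one of the databag paths, and with MAX_DATABAG_DEPTH at 5 the over-deep subtree should come back summarized and annotated rather than sent verbatim (exact placeholder shape is an internal detail).

    from sentry_sdk.serializer import serialize

    event = {"extra": {"l1": {"l2": {"l3": {"l4": {"l5": {"l6": "too deep"}}}}}}}
    trimmed = serialize(event, max_value_length=1024)
    # trimmed["extra"] is depth-limited; values past the databag depth limit
    # are replaced with annotated placeholders instead of the raw nested dicts.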
@@ -187,8 +166,7 @@ def _is_databag(): return False - def _is_request_body(): - # type: () -> Optional[bool] + def _is_request_body() -> Optional[bool]: try: if path[0] == "request" and path[1] == "data": return True @@ -198,15 +176,14 @@ def _is_request_body(): return False def _serialize_node( - obj, # type: Any - is_databag=None, # type: Optional[bool] - is_request_body=None, # type: Optional[bool] - should_repr_strings=None, # type: Optional[bool] - segment=None, # type: Optional[Segment] - remaining_breadth=None, # type: Optional[Union[int, float]] - remaining_depth=None, # type: Optional[Union[int, float]] - ): - # type: (...) -> Any + obj: Any, + is_databag: Optional[bool] = None, + is_request_body: Optional[bool] = None, + should_repr_strings: Optional[bool] = None, + segment: Optional[Segment] = None, + remaining_breadth: Optional[Union[int, float]] = None, + remaining_depth: Optional[Union[int, float]] = None, + ) -> Any: if segment is not None: path.append(segment) @@ -235,22 +212,20 @@ def _serialize_node( path.pop() del meta_stack[len(path) + 1 :] - def _flatten_annotated(obj): - # type: (Any) -> Any + def _flatten_annotated(obj: Any) -> Any: if isinstance(obj, AnnotatedValue): _annotate(**obj.metadata) obj = obj.value return obj def _serialize_node_impl( - obj, - is_databag, - is_request_body, - should_repr_strings, - remaining_depth, - remaining_breadth, - ): - # type: (Any, Optional[bool], Optional[bool], Optional[bool], Optional[Union[float, int]], Optional[Union[float, int]]) -> Any + obj: Any, + is_databag: Optional[bool], + is_request_body: Optional[bool], + should_repr_strings: Optional[bool], + remaining_depth: Optional[Union[float, int]], + remaining_breadth: Optional[Union[float, int]], + ) -> Any: if isinstance(obj, AnnotatedValue): should_repr_strings = False if should_repr_strings is None: @@ -314,7 +289,7 @@ def _serialize_node_impl( # might mutate our dictionary while we're still iterating over it. obj = dict(obj.items()) - rv_dict = {} # type: Dict[str, Any] + rv_dict: Dict[str, Any] = {} i = 0 for k, v in obj.items(): diff --git a/sentry_sdk/session.py b/sentry_sdk/session.py index c1d422c115..e392bc354b 100644 --- a/sentry_sdk/session.py +++ b/sentry_sdk/session.py @@ -1,3 +1,4 @@ +from __future__ import annotations import uuid from datetime import datetime, timezone @@ -6,23 +7,15 @@ from typing import TYPE_CHECKING if TYPE_CHECKING: - from typing import Optional - from typing import Union - from typing import Any - from typing import Dict - from sentry_sdk._types import SessionStatus + from typing import Optional, Union, Any, Dict -def _minute_trunc(ts): - # type: (datetime) -> datetime +def _minute_trunc(ts: datetime) -> datetime: return ts.replace(second=0, microsecond=0) -def _make_uuid( - val, # type: Union[str, uuid.UUID] -): - # type: (...) 
-> uuid.UUID +def _make_uuid(val: Union[str, uuid.UUID]) -> uuid.UUID: if isinstance(val, uuid.UUID): return val return uuid.UUID(val) @@ -31,21 +24,20 @@ def _make_uuid( class Session: def __init__( self, - sid=None, # type: Optional[Union[str, uuid.UUID]] - did=None, # type: Optional[str] - timestamp=None, # type: Optional[datetime] - started=None, # type: Optional[datetime] - duration=None, # type: Optional[float] - status=None, # type: Optional[SessionStatus] - release=None, # type: Optional[str] - environment=None, # type: Optional[str] - user_agent=None, # type: Optional[str] - ip_address=None, # type: Optional[str] - errors=None, # type: Optional[int] - user=None, # type: Optional[Any] - session_mode="application", # type: str - ): - # type: (...) -> None + sid: Optional[Union[str, uuid.UUID]] = None, + did: Optional[str] = None, + timestamp: Optional[datetime] = None, + started: Optional[datetime] = None, + duration: Optional[float] = None, + status: Optional[SessionStatus] = None, + release: Optional[str] = None, + environment: Optional[str] = None, + user_agent: Optional[str] = None, + ip_address: Optional[str] = None, + errors: Optional[int] = None, + user: Optional[Any] = None, + session_mode: str = "application", + ) -> None: if sid is None: sid = uuid.uuid4() if started is None: @@ -53,14 +45,14 @@ def __init__( if status is None: status = "ok" self.status = status - self.did = None # type: Optional[str] + self.did: Optional[str] = None self.started = started - self.release = None # type: Optional[str] - self.environment = None # type: Optional[str] - self.duration = None # type: Optional[float] - self.user_agent = None # type: Optional[str] - self.ip_address = None # type: Optional[str] - self.session_mode = session_mode # type: str + self.release: Optional[str] = None + self.environment: Optional[str] = None + self.duration: Optional[float] = None + self.user_agent: Optional[str] = None + self.ip_address: Optional[str] = None + self.session_mode: str = session_mode self.errors = 0 self.update( @@ -77,26 +69,24 @@ def __init__( ) @property - def truncated_started(self): - # type: (...) -> datetime + def truncated_started(self) -> datetime: return _minute_trunc(self.started) def update( self, - sid=None, # type: Optional[Union[str, uuid.UUID]] - did=None, # type: Optional[str] - timestamp=None, # type: Optional[datetime] - started=None, # type: Optional[datetime] - duration=None, # type: Optional[float] - status=None, # type: Optional[SessionStatus] - release=None, # type: Optional[str] - environment=None, # type: Optional[str] - user_agent=None, # type: Optional[str] - ip_address=None, # type: Optional[str] - errors=None, # type: Optional[int] - user=None, # type: Optional[Any] - ): - # type: (...) -> None + sid: Optional[Union[str, uuid.UUID]] = None, + did: Optional[str] = None, + timestamp: Optional[datetime] = None, + started: Optional[datetime] = None, + duration: Optional[float] = None, + status: Optional[SessionStatus] = None, + release: Optional[str] = None, + environment: Optional[str] = None, + user_agent: Optional[str] = None, + ip_address: Optional[str] = None, + errors: Optional[int] = None, + user: Optional[Any] = None, + ) -> None: # If a user is supplied we pull some data form it if user: if ip_address is None: @@ -129,19 +119,13 @@ def update( if status is not None: self.status = status - def close( - self, status=None # type: Optional[SessionStatus] - ): - # type: (...) 
-> Any + def close(self, status: Optional[SessionStatus] = None) -> Any: if status is None and self.status == "ok": status = "exited" if status is not None: self.update(status=status) - def get_json_attrs( - self, with_user_info=True # type: Optional[bool] - ): - # type: (...) -> Any + def get_json_attrs(self, with_user_info: bool = True) -> Any: attrs = {} if self.release is not None: attrs["release"] = self.release @@ -154,15 +138,14 @@ def get_json_attrs( attrs["user_agent"] = self.user_agent return attrs - def to_json(self): - # type: (...) -> Any - rv = { + def to_json(self) -> Any: + rv: Dict[str, Any] = { "sid": str(self.sid), "init": True, "started": format_timestamp(self.started), "timestamp": format_timestamp(self.timestamp), "status": self.status, - } # type: Dict[str, Any] + } if self.errors: rv["errors"] = self.errors if self.did is not None: diff --git a/sentry_sdk/sessions.py b/sentry_sdk/sessions.py index 00fda23200..e20ffc4dd9 100644 --- a/sentry_sdk/sessions.py +++ b/sentry_sdk/sessions.py @@ -1,5 +1,5 @@ +from __future__ import annotations import os -import warnings from threading import Thread, Lock, Event from contextlib import contextmanager @@ -11,85 +11,17 @@ from typing import TYPE_CHECKING if TYPE_CHECKING: - from typing import Any - from typing import Callable - from typing import Dict - from typing import Generator - from typing import List - from typing import Optional - from typing import Union - - -def is_auto_session_tracking_enabled(hub=None): - # type: (Optional[sentry_sdk.Hub]) -> Union[Any, bool, None] - """DEPRECATED: Utility function to find out if session tracking is enabled.""" - - # Internal callers should use private _is_auto_session_tracking_enabled, instead. - warnings.warn( - "This function is deprecated and will be removed in the next major release. " - "There is no public API replacement.", - DeprecationWarning, - stacklevel=2, + from typing import ( + Any, + Callable, + Dict, + List, + Optional, + Generator, ) - if hub is None: - hub = sentry_sdk.Hub.current - should_track = hub.scope._force_auto_session_tracking - - if should_track is None: - client_options = hub.client.options if hub.client else {} - should_track = client_options.get("auto_session_tracking", False) - - return should_track - - -@contextmanager -def auto_session_tracking(hub=None, session_mode="application"): - # type: (Optional[sentry_sdk.Hub], str) -> Generator[None, None, None] - """DEPRECATED: Use track_session instead - Starts and stops a session automatically around a block. - """ - warnings.warn( - "This function is deprecated and will be removed in the next major release. " - "Use track_session instead.", - DeprecationWarning, - stacklevel=2, - ) - - if hub is None: - hub = sentry_sdk.Hub.current - with warnings.catch_warnings(): - warnings.simplefilter("ignore", DeprecationWarning) - should_track = is_auto_session_tracking_enabled(hub) - if should_track: - hub.start_session(session_mode=session_mode) - try: - yield - finally: - if should_track: - hub.end_session() - - -def is_auto_session_tracking_enabled_scope(scope): - # type: (sentry_sdk.Scope) -> bool - """ - DEPRECATED: Utility function to find out if session tracking is enabled. - """ - - warnings.warn( - "This function is deprecated and will be removed in the next major release. " - "There is no public API replacement.", - DeprecationWarning, - stacklevel=2, - ) - - # Internal callers should use private _is_auto_session_tracking_enabled, instead. 
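With the deprecated Hub-based helpers above removed, `track_session` (converted just below) is the remaining session-tracking entry point. A minimal sketch, assuming the `auto_session_tracking` init option and `sentry_sdk.isolation_scope()`; `handle_request` is a hypothetical handler:

    import sentry_sdk
    from sentry_sdk.sessions import track_session

    sentry_sdk.init(dsn="...", auto_session_tracking=True)

    with sentry_sdk.isolation_scope() as scope:
        # No-op if session tracking is disabled. "request"-mode sessions are
        # bucketed by the SessionFlusher and sent as aggregates; "application"
        # mode sends individual session updates.
        with track_session(scope, session_mode="request"):
            handle_request()  # hypothetical request handler
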
- return _is_auto_session_tracking_enabled(scope) - - -def _is_auto_session_tracking_enabled(scope): - # type: (sentry_sdk.Scope) -> bool +def _is_auto_session_tracking_enabled(scope: sentry_sdk.Scope) -> bool: """ Utility function to find out if session tracking is enabled. """ @@ -103,25 +35,9 @@ def _is_auto_session_tracking_enabled(scope): @contextmanager -def auto_session_tracking_scope(scope, session_mode="application"): - # type: (sentry_sdk.Scope, str) -> Generator[None, None, None] - """DEPRECATED: This function is a deprecated alias for track_session. - Starts and stops a session automatically around a block. - """ - - warnings.warn( - "This function is a deprecated alias for track_session and will be removed in the next major release.", - DeprecationWarning, - stacklevel=2, - ) - - with track_session(scope, session_mode=session_mode): - yield - - -@contextmanager -def track_session(scope, session_mode="application"): - # type: (sentry_sdk.Scope, str) -> Generator[None, None, None] +def track_session( + scope: sentry_sdk.Scope, session_mode: str = "application" +) -> Generator[None, None, None]: """ Start a new session in the provided scope, assuming session tracking is enabled. This is a no-op context manager if session tracking is not enabled. @@ -141,30 +57,27 @@ def track_session(scope, session_mode="application"): MAX_ENVELOPE_ITEMS = 100 -def make_aggregate_envelope(aggregate_states, attrs): - # type: (Any, Any) -> Any +def make_aggregate_envelope(aggregate_states: Any, attrs: Any) -> Any: return {"attrs": dict(attrs), "aggregates": list(aggregate_states.values())} class SessionFlusher: def __init__( self, - capture_func, # type: Callable[[Envelope], None] - flush_interval=60, # type: int - ): - # type: (...) -> None + capture_func: Callable[[Envelope], None], + flush_interval: int = 60, + ) -> None: self.capture_func = capture_func self.flush_interval = flush_interval - self.pending_sessions = [] # type: List[Any] - self.pending_aggregates = {} # type: Dict[Any, Any] - self._thread = None # type: Optional[Thread] + self.pending_sessions: List[Any] = [] + self.pending_aggregates: Dict[Any, Any] = {} + self._thread: Optional[Thread] = None self._thread_lock = Lock() self._aggregate_lock = Lock() - self._thread_for_pid = None # type: Optional[int] - self.__shutdown_requested = Event() + self._thread_for_pid: Optional[int] = None + self.__shutdown_requested: Event = Event() - def flush(self): - # type: (...) -> None + def flush(self) -> None: pending_sessions = self.pending_sessions self.pending_sessions = [] @@ -190,8 +103,7 @@ def flush(self): if len(envelope.items) > 0: self.capture_func(envelope) - def _ensure_running(self): - # type: (...) -> None + def _ensure_running(self) -> None: """ Check that we have an active thread to run in, or create one if not. @@ -205,8 +117,7 @@ def _ensure_running(self): if self._thread_for_pid == os.getpid() and self._thread is not None: return None - def _thread(): - # type: (...) -> None + def _thread() -> None: running = True while running: running = not self.__shutdown_requested.wait(self.flush_interval) @@ -227,10 +138,7 @@ def _thread(): return None - def add_aggregate_session( - self, session # type: Session - ): - # type: (...) 
-> None + def add_aggregate_session(self, session: Session) -> None: # NOTE on `session.did`: # the protocol can deal with buckets that have a distinct-id, however # in practice we expect the python SDK to have an extremely high cardinality @@ -258,16 +166,12 @@ def add_aggregate_session( else: state["exited"] = state.get("exited", 0) + 1 - def add_session( - self, session # type: Session - ): - # type: (...) -> None + def add_session(self, session: Session) -> None: if session.session_mode == "request": self.add_aggregate_session(session) else: self.pending_sessions.append(session.to_json()) self._ensure_running() - def kill(self): - # type: (...) -> None + def kill(self) -> None: self.__shutdown_requested.set() diff --git a/sentry_sdk/spotlight.py b/sentry_sdk/spotlight.py index 4ac427b9c1..976879dc84 100644 --- a/sentry_sdk/spotlight.py +++ b/sentry_sdk/spotlight.py @@ -1,3 +1,4 @@ +from __future__ import annotations import io import logging import os @@ -12,11 +13,7 @@ from typing import TYPE_CHECKING if TYPE_CHECKING: - from typing import Any - from typing import Callable - from typing import Dict - from typing import Optional - from typing import Self + from typing import Any, Callable, Dict, Optional from sentry_sdk.utils import ( logger as sentry_logger, @@ -34,14 +31,12 @@ class SpotlightClient: - def __init__(self, url): - # type: (str) -> None + def __init__(self, url: str) -> None: self.url = url self.http = urllib3.PoolManager() self.fails = 0 - def capture_envelope(self, envelope): - # type: (Envelope) -> None + def capture_envelope(self, envelope: Envelope) -> None: body = io.BytesIO() envelope.serialize_into(body) try: @@ -90,11 +85,10 @@ def capture_envelope(self, envelope): ) class SpotlightMiddleware(MiddlewareMixin): # type: ignore[misc] - _spotlight_script = None # type: Optional[str] - _spotlight_url = None # type: Optional[str] + _spotlight_script: Optional[str] = None + _spotlight_url: Optional[str] = None - def __init__(self, get_response): - # type: (Self, Callable[..., HttpResponse]) -> None + def __init__(self, get_response: Callable[..., HttpResponse]) -> None: super().__init__(get_response) import sentry_sdk.api @@ -111,8 +105,7 @@ def __init__(self, get_response): self._spotlight_url = urllib.parse.urljoin(spotlight_client.url, "../") @property - def spotlight_script(self): - # type: (Self) -> Optional[str] + def spotlight_script(self) -> Optional[str]: if self._spotlight_url is not None and self._spotlight_script is None: try: spotlight_js_url = urllib.parse.urljoin( @@ -136,8 +129,9 @@ def spotlight_script(self): return self._spotlight_script - def process_response(self, _request, response): - # type: (Self, HttpRequest, HttpResponse) -> Optional[HttpResponse] + def process_response( + self, _request: HttpRequest, response: HttpResponse + ) -> Optional[HttpResponse]: content_type_header = tuple( p.strip() for p in response.headers.get("Content-Type", "").lower().split(";") @@ -181,8 +175,9 @@ def process_response(self, _request, response): return response - def process_exception(self, _request, exception): - # type: (Self, HttpRequest, Exception) -> Optional[HttpResponseServerError] + def process_exception( + self, _request: HttpRequest, exception: Exception + ) -> Optional[HttpResponseServerError]: if not settings.DEBUG or not self._spotlight_url: return None @@ -207,8 +202,7 @@ def process_exception(self, _request, exception): settings = None -def setup_spotlight(options): - # type: (Dict[str, Any]) -> Optional[SpotlightClient] +def 
setup_spotlight(options: Dict[str, Any]) -> Optional[SpotlightClient]: _handler = logging.StreamHandler(sys.stderr) _handler.setFormatter(logging.Formatter(" [spotlight] %(levelname)s: %(message)s")) logger.addHandler(_handler) diff --git a/sentry_sdk/tracing.py b/sentry_sdk/tracing.py index dd1392d150..aeab1e38ff 100644 --- a/sentry_sdk/tracing.py +++ b/sentry_sdk/tracing.py @@ -1,376 +1,245 @@ -from decimal import Decimal -import uuid +from __future__ import annotations +from datetime import datetime +import json import warnings -from datetime import datetime, timedelta, timezone -from enum import Enum -import sentry_sdk -from sentry_sdk.consts import INSTRUMENTER, SPANSTATUS, SPANDATA -from sentry_sdk.profiler.continuous_profiler import get_profiler_id +from opentelemetry import trace as otel_trace, context +from opentelemetry.trace import ( + format_trace_id, + format_span_id, + Span as OtelSpan, + TraceState, + get_current_span, + INVALID_SPAN, +) +from opentelemetry.trace.status import Status, StatusCode +from opentelemetry.sdk.trace import ReadableSpan +from opentelemetry.version import __version__ as otel_version + +from sentry_sdk.consts import ( + DEFAULT_SPAN_NAME, + DEFAULT_SPAN_ORIGIN, + BAGGAGE_HEADER_NAME, + SENTRY_TRACE_HEADER_NAME, + SPANSTATUS, + SPANDATA, + TransactionSource, +) +from sentry_sdk.opentelemetry.consts import ( + TRACESTATE_SAMPLE_RATE_KEY, + SentrySpanAttribute, +) +from sentry_sdk.opentelemetry.utils import ( + baggage_from_trace_state, + convert_from_otel_timestamp, + convert_to_otel_timestamp, + get_trace_context, + get_trace_state, + get_sentry_meta, + serialize_trace_state, +) +from sentry_sdk.tracing_utils import ( + get_span_status_from_http_code, + _is_span_origin_excluded, +) from sentry_sdk.utils import ( + _serialize_span_attribute, get_current_thread_meta, - is_valid_sample_rate, - logger, - nanosecond_time, + parse_version, should_be_treated_as_error, ) -from typing import TYPE_CHECKING - +from typing import TYPE_CHECKING, overload if TYPE_CHECKING: - from collections.abc import Callable, Mapping, MutableMapping - from typing import Any - from typing import Dict - from typing import Iterator - from typing import List - from typing import Optional - from typing import overload - from typing import ParamSpec - from typing import Tuple - from typing import Union - from typing import TypeVar - - from typing_extensions import TypedDict, Unpack + from typing import ( + Callable, + Any, + Dict, + Iterator, + Optional, + ParamSpec, + Tuple, + Union, + TypeVar, + ) + from sentry_sdk._types import SamplingContext + from sentry_sdk.tracing_utils import Baggage P = ParamSpec("P") R = TypeVar("R") - from sentry_sdk.profiler.continuous_profiler import ContinuousProfile - from sentry_sdk.profiler.transaction_profiler import Profile - from sentry_sdk._types import ( - Event, - MeasurementUnit, - SamplingContext, - MeasurementValue, - ) - class SpanKwargs(TypedDict, total=False): - trace_id: str - """ - The trace ID of the root span. If this new span is to be the root span, - omit this parameter, and a new trace ID will be generated. - """ +_FLAGS_CAPACITY = 10 +_OTEL_VERSION = parse_version(otel_version) - span_id: str - """The span ID of this span. 
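Before the tracing changes: the `spotlight.py` hunks above wire `setup_spotlight` to the init options. A sketch of enabling it, assuming the `spotlight` option accepts `True` or a sidecar URL; the URL shown is the conventional local default, not taken from this diff:

    import sentry_sdk

    sentry_sdk.init(dsn="...", spotlight=True)
    # or with an explicit sidecar URL:
    # sentry_sdk.init(dsn="...", spotlight="http://localhost:8969/stream")
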
If omitted, a new span ID will be generated.""" +tracer = otel_trace.get_tracer(__name__) - parent_span_id: str - """The span ID of the parent span, if applicable.""" - same_process_as_parent: bool - """Whether this span is in the same process as the parent span.""" +class NoOpSpan: + def __init__(self, **kwargs: Any) -> None: + pass - sampled: bool - """ - Whether the span should be sampled. Overrides the default sampling decision - for this span when provided. - """ + def __repr__(self) -> str: + return "<%s>" % self.__class__.__name__ - op: str - """ - The span's operation. A list of recommended values is available here: - https://develop.sentry.dev/sdk/performance/span-operations/ - """ + @property + def root_span(self) -> Optional[Span]: + return None - description: str - """A description of what operation is being performed within the span. This argument is DEPRECATED. Please use the `name` parameter, instead.""" + def start_child(self, **kwargs: Any) -> NoOpSpan: + return NoOpSpan() - hub: Optional["sentry_sdk.Hub"] - """The hub to use for this span. This argument is DEPRECATED. Please use the `scope` parameter, instead.""" + def to_traceparent(self) -> str: + return "" - status: str - """The span's status. Possible values are listed at https://develop.sentry.dev/sdk/event-payloads/span/""" + def to_baggage(self) -> Optional[Baggage]: + return None - containing_transaction: Optional["Transaction"] - """The transaction that this span belongs to.""" + def get_baggage(self) -> Optional[Baggage]: + return None - start_timestamp: Optional[Union[datetime, float]] - """ - The timestamp when the span started. If omitted, the current time - will be used. - """ + def iter_headers(self) -> Iterator[Tuple[str, str]]: + return iter(()) - scope: "sentry_sdk.Scope" - """The scope to use for this span. If not provided, we use the current scope.""" + def set_tag(self, key: str, value: Any) -> None: + pass - origin: str - """ - The origin of the span. - See https://develop.sentry.dev/sdk/performance/trace-origin/ - Default "manual". - """ + def set_data(self, key: str, value: Any) -> None: + pass - name: str - """A string describing what operation is being performed within the span/transaction.""" + def set_status(self, value: str) -> None: + pass - class TransactionKwargs(SpanKwargs, total=False): - source: str - """ - A string describing the source of the transaction name. This will be used to determine the transaction's type. - See https://develop.sentry.dev/sdk/event-payloads/transaction/#transaction-annotations for more information. - Default "custom". - """ + def set_http_status(self, http_status: int) -> None: + pass - parent_sampled: bool - """Whether the parent transaction was sampled. If True this transaction will be kept, if False it will be discarded.""" + def is_success(self) -> bool: + return True - baggage: "Baggage" - """The W3C baggage header value. 
(see https://www.w3.org/TR/baggage/)""" + def to_json(self) -> Dict[str, Any]: + return {} - ProfileContext = TypedDict( - "ProfileContext", - { - "profiler_id": str, - }, - ) + def get_trace_context(self) -> Any: + return {} -BAGGAGE_HEADER_NAME = "baggage" -SENTRY_TRACE_HEADER_NAME = "sentry-trace" - - -# Transaction source -# see https://develop.sentry.dev/sdk/event-payloads/transaction/#transaction-annotations -class TransactionSource(str, Enum): - COMPONENT = "component" - CUSTOM = "custom" - ROUTE = "route" - TASK = "task" - URL = "url" - VIEW = "view" - - def __str__(self): - # type: () -> str - return self.value - - -# These are typically high cardinality and the server hates them -LOW_QUALITY_TRANSACTION_SOURCES = [ - TransactionSource.URL, -] - -SOURCE_FOR_STYLE = { - "endpoint": TransactionSource.COMPONENT, - "function_name": TransactionSource.COMPONENT, - "handler_name": TransactionSource.COMPONENT, - "method_and_path_pattern": TransactionSource.ROUTE, - "path": TransactionSource.URL, - "route_name": TransactionSource.COMPONENT, - "route_pattern": TransactionSource.ROUTE, - "uri_template": TransactionSource.ROUTE, - "url": TransactionSource.ROUTE, -} - - -def get_span_status_from_http_code(http_status_code): - # type: (int) -> str - """ - Returns the Sentry status corresponding to the given HTTP status code. + def get_profile_context(self) -> Any: + return {} - See: https://develop.sentry.dev/sdk/event-payloads/contexts/#trace-context - """ - if http_status_code < 400: - return SPANSTATUS.OK - - elif 400 <= http_status_code < 500: - if http_status_code == 403: - return SPANSTATUS.PERMISSION_DENIED - elif http_status_code == 404: - return SPANSTATUS.NOT_FOUND - elif http_status_code == 429: - return SPANSTATUS.RESOURCE_EXHAUSTED - elif http_status_code == 413: - return SPANSTATUS.FAILED_PRECONDITION - elif http_status_code == 401: - return SPANSTATUS.UNAUTHENTICATED - elif http_status_code == 409: - return SPANSTATUS.ALREADY_EXISTS - else: - return SPANSTATUS.INVALID_ARGUMENT - - elif 500 <= http_status_code < 600: - if http_status_code == 504: - return SPANSTATUS.DEADLINE_EXCEEDED - elif http_status_code == 501: - return SPANSTATUS.UNIMPLEMENTED - elif http_status_code == 503: - return SPANSTATUS.UNAVAILABLE - else: - return SPANSTATUS.INTERNAL_ERROR + def finish(self, end_timestamp: Optional[Union[float, datetime]] = None) -> None: + pass - return SPANSTATUS.UNKNOWN_ERROR + def set_context(self, key: str, value: dict[str, Any]) -> None: + pass + def init_span_recorder(self, maxlen: int) -> None: + pass -class _SpanRecorder: - """Limits the number of spans recorded in a transaction.""" + def _set_initial_sampling_decision(self, sampling_context: SamplingContext) -> None: + pass - __slots__ = ("maxlen", "spans", "dropped_spans") - def __init__(self, maxlen): - # type: (int) -> None - # FIXME: this is `maxlen - 1` only to preserve historical behavior - # enforced by tests. - # Either this should be changed to `maxlen` or the JS SDK implementation - # should be changed to match a consistent interpretation of what maxlen - # limits: either transaction+spans or only child spans. - self.maxlen = maxlen - 1 - self.spans = [] # type: List[Span] - self.dropped_spans = 0 # type: int +class Span: + """ + OTel span wrapper providing compatibility with the old span interface. 
+ """ - def add(self, span): - # type: (Span) -> None - if len(self.spans) > self.maxlen: - span._span_recorder = None - self.dropped_spans += 1 + def __init__( + self, + *, + op: Optional[str] = None, + description: Optional[str] = None, + status: Optional[str] = None, + sampled: Optional[bool] = None, + start_timestamp: Optional[Union[datetime, float]] = None, + origin: Optional[str] = None, + name: Optional[str] = None, + source: str = TransactionSource.CUSTOM, + attributes: Optional[dict[str, Any]] = None, + only_as_child_span: bool = False, + parent_span: Optional[Span] = None, + otel_span: Optional[OtelSpan] = None, + span: Optional[Span] = None, + ) -> None: + """ + If otel_span is passed explicitly, just acts as a proxy. + + If span is passed explicitly, use it. The only purpose of this param + is backwards compatibility with start_transaction(transaction=...). + + If only_as_child_span is True, just return an INVALID_SPAN + and avoid instrumentation if there's no active parent span. + """ + if otel_span is not None: + self._otel_span = otel_span + elif span is not None: + self._otel_span = span._otel_span else: - self.spans.append(span) + skip_span = False + if only_as_child_span and parent_span is None: + parent_span_context = get_current_span().get_span_context() + skip_span = ( + not parent_span_context.is_valid or parent_span_context.is_remote + ) + origin = origin or DEFAULT_SPAN_ORIGIN + if not skip_span and _is_span_origin_excluded(origin): + skip_span = True -class Span: - """A span holds timing information of a block of code. - Spans can have multiple child spans thus forming a span tree. - - :param trace_id: The trace ID of the root span. If this new span is to be the root span, - omit this parameter, and a new trace ID will be generated. - :param span_id: The span ID of this span. If omitted, a new span ID will be generated. - :param parent_span_id: The span ID of the parent span, if applicable. - :param same_process_as_parent: Whether this span is in the same process as the parent span. - :param sampled: Whether the span should be sampled. Overrides the default sampling decision - for this span when provided. - :param op: The span's operation. A list of recommended values is available here: - https://develop.sentry.dev/sdk/performance/span-operations/ - :param description: A description of what operation is being performed within the span. - - .. deprecated:: 2.15.0 - Please use the `name` parameter, instead. - :param name: A string describing what operation is being performed within the span. - :param hub: The hub to use for this span. - - .. deprecated:: 2.0.0 - Please use the `scope` parameter, instead. - :param status: The span's status. Possible values are listed at - https://develop.sentry.dev/sdk/event-payloads/span/ - :param containing_transaction: The transaction that this span belongs to. - :param start_timestamp: The timestamp when the span started. If omitted, the current time - will be used. - :param scope: The scope to use for this span. If not provided, we use the current scope. 
- """ + if skip_span: + self._otel_span = INVALID_SPAN + else: + if start_timestamp is not None: + # OTel timestamps have nanosecond precision + start_timestamp = convert_to_otel_timestamp(start_timestamp) + + span_name = name or description or DEFAULT_SPAN_NAME + + # Prepopulate some attrs so that they're accessible in traces_sampler + attributes = attributes or {} + if op is not None: + attributes[SentrySpanAttribute.OP] = op + if source is not None: + attributes[SentrySpanAttribute.SOURCE] = source + if description is not None: + attributes[SentrySpanAttribute.DESCRIPTION] = description + if sampled is not None: + attributes[SentrySpanAttribute.CUSTOM_SAMPLED] = sampled + + parent_context = None + if parent_span is not None: + parent_context = otel_trace.set_span_in_context( + parent_span._otel_span + ) + + self._otel_span = tracer.start_span( + span_name, + context=parent_context, + start_time=start_timestamp, + attributes=attributes, + ) - __slots__ = ( - "trace_id", - "span_id", - "parent_span_id", - "same_process_as_parent", - "sampled", - "op", - "description", - "_measurements", - "start_timestamp", - "_start_timestamp_monotonic_ns", - "status", - "timestamp", - "_tags", - "_data", - "_span_recorder", - "hub", - "_context_manager_state", - "_containing_transaction", - "_local_aggregator", - "scope", - "origin", - "name", - "_flags", - "_flags_capacity", - ) + self.origin = origin + self.description = description + self.name = span_name - def __init__( - self, - trace_id=None, # type: Optional[str] - span_id=None, # type: Optional[str] - parent_span_id=None, # type: Optional[str] - same_process_as_parent=True, # type: bool - sampled=None, # type: Optional[bool] - op=None, # type: Optional[str] - description=None, # type: Optional[str] - hub=None, # type: Optional[sentry_sdk.Hub] # deprecated - status=None, # type: Optional[str] - containing_transaction=None, # type: Optional[Transaction] - start_timestamp=None, # type: Optional[Union[datetime, float]] - scope=None, # type: Optional[sentry_sdk.Scope] - origin="manual", # type: str - name=None, # type: Optional[str] - ): - # type: (...) -> None - self.trace_id = trace_id or uuid.uuid4().hex - self.span_id = span_id or uuid.uuid4().hex[16:] - self.parent_span_id = parent_span_id - self.same_process_as_parent = same_process_as_parent - self.sampled = sampled - self.op = op - self.description = name or description - self.status = status - self.hub = hub # backwards compatibility - self.scope = scope - self.origin = origin - self._measurements = {} # type: Dict[str, MeasurementValue] - self._tags = {} # type: MutableMapping[str, str] - self._data = {} # type: Dict[str, Any] - self._containing_transaction = containing_transaction - self._flags = {} # type: Dict[str, bool] - self._flags_capacity = 10 - - if hub is not None: - warnings.warn( - "The `hub` parameter is deprecated. 
Please use `scope` instead.", - DeprecationWarning, - stacklevel=2, - ) + if status is not None: + self.set_status(status) + + self.update_active_thread() - self.scope = self.scope or hub.scope - - if start_timestamp is None: - start_timestamp = datetime.now(timezone.utc) - elif isinstance(start_timestamp, float): - start_timestamp = datetime.fromtimestamp(start_timestamp, timezone.utc) - self.start_timestamp = start_timestamp - try: - # profiling depends on this value and requires that - # it is measured in nanoseconds - self._start_timestamp_monotonic_ns = nanosecond_time() - except AttributeError: - pass - - #: End timestamp of span - self.timestamp = None # type: Optional[datetime] - - self._span_recorder = None # type: Optional[_SpanRecorder] - self._local_aggregator = None # type: Optional[LocalAggregator] - - self.update_active_thread() - self.set_profiler_id(get_profiler_id()) - - # TODO this should really live on the Transaction class rather than the Span - # class - def init_span_recorder(self, maxlen): - # type: (int) -> None - if self._span_recorder is None: - self._span_recorder = _SpanRecorder(maxlen) - - def _get_local_aggregator(self): - # type: (...) -> LocalAggregator - rv = self._local_aggregator - if rv is None: - rv = self._local_aggregator = LocalAggregator() - return rv - - def __repr__(self): - # type: () -> str + def __eq__(self, other: object) -> bool: + if not isinstance(other, Span): + return False + return self._otel_span == other._otel_span + + def __repr__(self) -> str: return ( - "<%s(op=%r, description:%r, trace_id=%r, span_id=%r, parent_span_id=%r, sampled=%r, origin=%r)>" + "<%s(op=%r, name:%r, trace_id=%r, span_id=%r, parent_span_id=%r, sampled=%r, origin=%r)>" % ( self.__class__.__name__, self.op, - self.description, + self.name, self.trace_id, self.span_id, self.parent_span_id, @@ -379,199 +248,162 @@ def __repr__(self): ) ) - def __enter__(self): - # type: () -> Span - scope = self.scope or sentry_sdk.get_current_scope() - old_span = scope.span - scope.span = self - self._context_manager_state = (scope, old_span) + def activate(self) -> None: + ctx = otel_trace.set_span_in_context(self._otel_span) + # set as the implicit current context + self._ctx_token = context.attach(ctx) + + def deactivate(self) -> None: + if hasattr(self, "_ctx_token"): + context.detach(self._ctx_token) + del self._ctx_token + + def __enter__(self) -> Span: + self.activate() return self - def __exit__(self, ty, value, tb): - # type: (Optional[Any], Optional[Any], Optional[Any]) -> None + def __exit__( + self, ty: Optional[Any], value: Optional[Any], tb: Optional[Any] + ) -> None: if value is not None and should_be_treated_as_error(ty, value): self.set_status(SPANSTATUS.INTERNAL_ERROR) + else: + status_unset = ( + hasattr(self._otel_span, "status") + and self._otel_span.status.status_code == StatusCode.UNSET + ) + if status_unset: + self.set_status(SPANSTATUS.OK) - scope, old_span = self._context_manager_state - del self._context_manager_state - self.finish(scope) - scope.span = old_span - - @property - def containing_transaction(self): - # type: () -> Optional[Transaction] - """The ``Transaction`` that this span belongs to. 
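`activate()`/`deactivate()` above are thin wrappers over OpenTelemetry's context API. For reference, the underlying pattern in plain OTel (real `opentelemetry` calls, no Sentry specifics):

    from opentelemetry import context, trace

    tracer = trace.get_tracer(__name__)
    otel_span = tracer.start_span("work")
    token = context.attach(trace.set_span_in_context(otel_span))  # make it current
    try:
        ...  # trace.get_current_span() now returns otel_span
    finally:
        context.detach(token)  # restore the previous implicit context
        otel_span.end()
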
- The ``Transaction`` is the root of the span tree, - so one could also think of this ``Transaction`` as the "root span".""" - - # this is a getter rather than a regular attribute so that transactions - # can return `self` here instead (as a way to prevent them circularly - # referencing themselves) - return self._containing_transaction - - def start_child(self, instrumenter=INSTRUMENTER.SENTRY, **kwargs): - # type: (str, **Any) -> Span - """ - Start a sub-span from the current span or transaction. + self.finish() + self.deactivate() - Takes the same arguments as the initializer of :py:class:`Span`. The - trace id, sampling decision, transaction pointer, and span recorder are - inherited from the current span/transaction. + async def __aenter__(self) -> Span: + return self.__enter__() - The instrumenter parameter is deprecated for user code, and it will - be removed in the next major version. Going forward, it should only - be used by the SDK itself. - """ - if kwargs.get("description") is not None: - warnings.warn( - "The `description` parameter is deprecated. Please use `name` instead.", - DeprecationWarning, - stacklevel=2, - ) + async def __aexit__( + self, ty: Optional[Any], value: Optional[Any], tb: Optional[Any] + ) -> None: + return self.__exit__(ty, value, tb) - configuration_instrumenter = sentry_sdk.get_client().options["instrumenter"] + @property + def description(self) -> Optional[str]: + return self.get_attribute(SentrySpanAttribute.DESCRIPTION) - if instrumenter != configuration_instrumenter: - return NoOpSpan() + @description.setter + def description(self, value: Optional[str]) -> None: + self.set_attribute(SentrySpanAttribute.DESCRIPTION, value) - kwargs.setdefault("sampled", self.sampled) + @property + def origin(self) -> Optional[str]: + return self.get_attribute(SentrySpanAttribute.ORIGIN) - child = Span( - trace_id=self.trace_id, - parent_span_id=self.span_id, - containing_transaction=self.containing_transaction, - **kwargs, - ) + @origin.setter + def origin(self, value: Optional[str]) -> None: + self.set_attribute(SentrySpanAttribute.ORIGIN, value) - span_recorder = ( - self.containing_transaction and self.containing_transaction._span_recorder + @property + def root_span(self) -> Optional[Span]: + root_otel_span: Optional[OtelSpan] = get_sentry_meta( + self._otel_span, "root_span" ) - if span_recorder: - span_recorder.add(child) - - return child - - @classmethod - def continue_from_environ( - cls, - environ, # type: Mapping[str, str] - **kwargs, # type: Any - ): - # type: (...) -> Transaction - """ - Create a Transaction with the given params, then add in data pulled from - the ``sentry-trace`` and ``baggage`` headers from the environ (if any) - before returning the Transaction. + return Span(otel_span=root_otel_span) if root_otel_span else None - This is different from :py:meth:`~sentry_sdk.tracing.Span.continue_from_headers` - in that it assumes header names in the form ``HTTP_HEADER_NAME`` - - such as you would get from a WSGI/ASGI environ - - rather than the form ``header-name``. + @property + def is_root_span(self) -> bool: + return self.root_span == self - :param environ: The ASGI/WSGI environ to pull information from. - """ - if cls is Span: - logger.warning( - "Deprecated: use Transaction.continue_from_environ " - "instead of Span.continue_from_environ." 
- ) - return Transaction.continue_from_headers(EnvironHeaders(environ), **kwargs) + @property + def parent_span_id(self) -> Optional[str]: + if ( + not isinstance(self._otel_span, ReadableSpan) + or self._otel_span.parent is None + ): + return None + return format_span_id(self._otel_span.parent.span_id) - @classmethod - def continue_from_headers( - cls, - headers, # type: Mapping[str, str] - *, - _sample_rand=None, # type: Optional[str] - **kwargs, # type: Any - ): - # type: (...) -> Transaction - """ - Create a transaction with the given params (including any data pulled from - the ``sentry-trace`` and ``baggage`` headers). + @property + def trace_id(self) -> str: + return format_trace_id(self._otel_span.get_span_context().trace_id) - :param headers: The dictionary with the HTTP headers to pull information from. - :param _sample_rand: If provided, we override the sample_rand value from the - incoming headers with this value. (internal use only) - """ - # TODO move this to the Transaction class - if cls is Span: - logger.warning( - "Deprecated: use Transaction.continue_from_headers " - "instead of Span.continue_from_headers." - ) + @property + def span_id(self) -> str: + return format_span_id(self._otel_span.get_span_context().span_id) - # TODO-neel move away from this kwargs stuff, it's confusing and opaque - # make more explicit - baggage = Baggage.from_incoming_header( - headers.get(BAGGAGE_HEADER_NAME), _sample_rand=_sample_rand + @property + def is_valid(self) -> bool: + return self._otel_span.get_span_context().is_valid and isinstance( + self._otel_span, ReadableSpan ) - kwargs.update({BAGGAGE_HEADER_NAME: baggage}) - sentrytrace_kwargs = extract_sentrytrace_data( - headers.get(SENTRY_TRACE_HEADER_NAME) + @property + def sampled(self) -> Optional[bool]: + return self._otel_span.get_span_context().trace_flags.sampled + + @property + def sample_rate(self) -> Optional[float]: + sample_rate = self._otel_span.get_span_context().trace_state.get( + TRACESTATE_SAMPLE_RATE_KEY ) + return float(sample_rate) if sample_rate is not None else None - if sentrytrace_kwargs is not None: - kwargs.update(sentrytrace_kwargs) + @property + def op(self) -> Optional[str]: + return self.get_attribute(SentrySpanAttribute.OP) - # If there's an incoming sentry-trace but no incoming baggage header, - # for instance in traces coming from older SDKs, - # baggage will be empty and immutable and won't be populated as head SDK. - baggage.freeze() + @op.setter + def op(self, value: Optional[str]) -> None: + self.set_attribute(SentrySpanAttribute.OP, value) - transaction = Transaction(**kwargs) - transaction.same_process_as_parent = False + @property + def name(self) -> Optional[str]: + return self.get_attribute(SentrySpanAttribute.NAME) - return transaction + @name.setter + def name(self, value: str) -> None: + self._otel_span.update_name(value) + self.set_attribute(SentrySpanAttribute.NAME, value) - def iter_headers(self): - # type: () -> Iterator[Tuple[str, str]] - """ - Creates a generator which returns the span's ``sentry-trace`` and ``baggage`` headers. - If the span's containing transaction doesn't yet have a ``baggage`` value, - this will cause one to be generated and stored. - """ - if not self.containing_transaction: - # Do not propagate headers if there is no containing transaction. Otherwise, this - # span ends up being the root span of a new trace, and since it does not get sent - # to Sentry, the trace will be missing a root transaction. 
The dynamic sampling - # context will also be missing, breaking dynamic sampling & traces. - return + @property + def source(self) -> str: + return ( + self.get_attribute(SentrySpanAttribute.SOURCE) or TransactionSource.CUSTOM + ) - yield SENTRY_TRACE_HEADER_NAME, self.to_traceparent() + @source.setter + def source(self, value: str) -> None: + self.set_attribute(SentrySpanAttribute.SOURCE, value) - baggage = self.containing_transaction.get_baggage().serialize() - if baggage: - yield BAGGAGE_HEADER_NAME, baggage - - @classmethod - def from_traceparent( - cls, - traceparent, # type: Optional[str] - **kwargs, # type: Any - ): - # type: (...) -> Optional[Transaction] - """ - DEPRECATED: Use :py:meth:`sentry_sdk.tracing.Span.continue_from_headers`. + @property + def start_timestamp(self) -> Optional[datetime]: + if not isinstance(self._otel_span, ReadableSpan): + return None - Create a ``Transaction`` with the given params, then add in data pulled from - the given ``sentry-trace`` header value before returning the ``Transaction``. - """ - logger.warning( - "Deprecated: Use Transaction.continue_from_headers(headers, **kwargs) " - "instead of from_traceparent(traceparent, **kwargs)" - ) + start_time = self._otel_span.start_time + if start_time is None: + return None + + return convert_from_otel_timestamp(start_time) - if not traceparent: + @property + def timestamp(self) -> Optional[datetime]: + if not isinstance(self._otel_span, ReadableSpan): return None - return cls.continue_from_headers( - {SENTRY_TRACE_HEADER_NAME: traceparent}, **kwargs - ) + end_time = self._otel_span.end_time + if end_time is None: + return None + + return convert_from_otel_timestamp(end_time) + + def start_child(self, **kwargs: Any) -> Span: + return Span(parent_span=self, **kwargs) - def to_traceparent(self): - # type: () -> str + def iter_headers(self) -> Iterator[Tuple[str, str]]: + yield SENTRY_TRACE_HEADER_NAME, self.to_traceparent() + yield BAGGAGE_HEADER_NAME, serialize_trace_state(self.trace_state) + + def to_traceparent(self) -> str: if self.sampled is True: sampled = "1" elif self.sampled is False: @@ -585,773 +417,151 @@ def to_traceparent(self): return traceparent - def to_baggage(self): - # type: () -> Optional[Baggage] - """Returns the :py:class:`~sentry_sdk.tracing_utils.Baggage` - associated with this ``Span``, if any. (Taken from the root of the span tree.) - """ - if self.containing_transaction: - return self.containing_transaction.get_baggage() - return None - - def set_tag(self, key, value): - # type: (str, Any) -> None - self._tags[key] = value - - def set_data(self, key, value): - # type: (str, Any) -> None - self._data[key] = value - - def update_data(self, data): - # type: (Dict[str, Any]) -> None - self._data.update(data) + @property + def trace_state(self) -> TraceState: + return get_trace_state(self._otel_span) - def set_flag(self, flag, result): - # type: (str, bool) -> None - if len(self._flags) < self._flags_capacity: - self._flags[flag] = result + def to_baggage(self) -> Baggage: + return self.get_baggage() - def set_status(self, value): - # type: (str) -> None - self.status = value + def get_baggage(self) -> Baggage: + return baggage_from_trace_state(self.trace_state) - def set_measurement(self, name, value, unit=""): - # type: (str, float, MeasurementUnit) -> None - """ - .. deprecated:: 2.28.0 - This function is deprecated and will be removed in the next major release. 
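The new `iter_headers` above always yields both headers, deriving `baggage` from the OTel trace state rather than from a containing transaction. A sketch of propagating them on an outgoing call, using `requests` purely for illustration; the header shapes in the comments are indicative, not normative:

    import requests  # illustrative HTTP client

    def call_downstream(span, url):
        headers = dict(span.iter_headers())
        # headers["sentry-trace"]: "<32-hex trace_id>-<16-hex span_id>-<0|1>"
        # headers["baggage"]:      "sentry-trace_id=...,sentry-sample_rate=..."
        return requests.get(url, headers=headers, timeout=5)
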
- """ + def set_tag(self, key: str, value: Any) -> None: + self.set_attribute(f"{SentrySpanAttribute.TAG}.{key}", value) + def set_data(self, key: str, value: Any) -> None: warnings.warn( - "`set_measurement()` is deprecated and will be removed in the next major version. Please use `set_data()` instead.", + "`Span.set_data` is deprecated. Please use `Span.set_attribute` instead.", DeprecationWarning, stacklevel=2, ) - self._measurements[name] = {"value": value, "unit": unit} - - def set_thread(self, thread_id, thread_name): - # type: (Optional[int], Optional[str]) -> None - - if thread_id is not None: - self.set_data(SPANDATA.THREAD_ID, str(thread_id)) - - if thread_name is not None: - self.set_data(SPANDATA.THREAD_NAME, thread_name) - - def set_profiler_id(self, profiler_id): - # type: (Optional[str]) -> None - if profiler_id is not None: - self.set_data(SPANDATA.PROFILER_ID, profiler_id) - - def set_http_status(self, http_status): - # type: (int) -> None - self.set_tag( - "http.status_code", str(http_status) - ) # we keep this for backwards compatibility - self.set_data(SPANDATA.HTTP_STATUS_CODE, http_status) - self.set_status(get_span_status_from_http_code(http_status)) - - def is_success(self): - # type: () -> bool - return self.status == "ok" - - def finish(self, scope=None, end_timestamp=None): - # type: (Optional[sentry_sdk.Scope], Optional[Union[float, datetime]]) -> Optional[str] - """ - Sets the end timestamp of the span. - - Additionally it also creates a breadcrumb from the span, - if the span represents a database or HTTP request. - - :param scope: The scope to use for this transaction. - If not provided, the current scope will be used. - :param end_timestamp: Optional timestamp that should - be used as timestamp instead of the current time. - - :return: Always ``None``. The type is ``Optional[str]`` to match - the return value of :py:meth:`sentry_sdk.tracing.Transaction.finish`. - """ - if self.timestamp is not None: - # This span is already finished, ignore. 
- return None - - try: - if end_timestamp: - if isinstance(end_timestamp, float): - end_timestamp = datetime.fromtimestamp(end_timestamp, timezone.utc) - self.timestamp = end_timestamp - else: - elapsed = nanosecond_time() - self._start_timestamp_monotonic_ns - self.timestamp = self.start_timestamp + timedelta( - microseconds=elapsed / 1000 - ) - except AttributeError: - self.timestamp = datetime.now(timezone.utc) - - scope = scope or sentry_sdk.get_current_scope() - maybe_create_breadcrumbs_from_span(scope, self) - - return None - - def to_json(self): - # type: () -> Dict[str, Any] - """Returns a JSON-compatible representation of the span.""" - - rv = { - "trace_id": self.trace_id, - "span_id": self.span_id, - "parent_span_id": self.parent_span_id, - "same_process_as_parent": self.same_process_as_parent, - "op": self.op, - "description": self.description, - "start_timestamp": self.start_timestamp, - "timestamp": self.timestamp, - "origin": self.origin, - } # type: Dict[str, Any] - - if self.status: - self._tags["status"] = self.status - - if self._local_aggregator is not None: - metrics_summary = self._local_aggregator.to_json() - if metrics_summary: - rv["_metrics_summary"] = metrics_summary - - if len(self._measurements) > 0: - rv["measurements"] = self._measurements - - tags = self._tags - if tags: - rv["tags"] = tags - - data = {} - data.update(self._flags) - data.update(self._data) - if data: - rv["data"] = data - - return rv - - def get_trace_context(self): - # type: () -> Any - rv = { - "trace_id": self.trace_id, - "span_id": self.span_id, - "parent_span_id": self.parent_span_id, - "op": self.op, - "description": self.description, - "origin": self.origin, - } # type: Dict[str, Any] - if self.status: - rv["status"] = self.status - - if self.containing_transaction: - rv["dynamic_sampling_context"] = ( - self.containing_transaction.get_baggage().dynamic_sampling_context() - ) - - data = {} - - thread_id = self._data.get(SPANDATA.THREAD_ID) - if thread_id is not None: - data["thread.id"] = thread_id - thread_name = self._data.get(SPANDATA.THREAD_NAME) - if thread_name is not None: - data["thread.name"] = thread_name + # TODO-neel-potel we cannot add dicts here + self.set_attribute(key, value) - if data: - rv["data"] = data - - return rv - - def get_profile_context(self): - # type: () -> Optional[ProfileContext] - profiler_id = self._data.get(SPANDATA.PROFILER_ID) - if profiler_id is None: + def get_attribute(self, name: str) -> Optional[Any]: + if ( + not isinstance(self._otel_span, ReadableSpan) + or not self._otel_span.attributes + ): return None + return self._otel_span.attributes.get(name) - return { - "profiler_id": profiler_id, - } - - def update_active_thread(self): - # type: () -> None - thread_id, thread_name = get_current_thread_meta() - self.set_thread(thread_id, thread_name) - - -class Transaction(Span): - """The Transaction is the root element that holds all the spans - for Sentry performance instrumentation. - - :param name: Identifier of the transaction. - Will show up in the Sentry UI. - :param parent_sampled: Whether the parent transaction was sampled. - If True this transaction will be kept, if False it will be discarded. - :param baggage: The W3C baggage header value. - (see https://www.w3.org/TR/baggage/) - :param source: A string describing the source of the transaction name. - This will be used to determine the transaction's type. - See https://develop.sentry.dev/sdk/event-payloads/transaction/#transaction-annotations - for more information. Default "custom". 
- :param kwargs: Additional arguments to be passed to the Span constructor. - See :py:class:`sentry_sdk.tracing.Span` for available arguments. - """ - - __slots__ = ( - "name", - "source", - "parent_sampled", - # used to create baggage value for head SDKs in dynamic sampling - "sample_rate", - "_measurements", - "_contexts", - "_profile", - "_continuous_profile", - "_baggage", - "_sample_rand", - ) - - def __init__( # type: ignore[misc] - self, - name="", # type: str - parent_sampled=None, # type: Optional[bool] - baggage=None, # type: Optional[Baggage] - source=TransactionSource.CUSTOM, # type: str - **kwargs, # type: Unpack[SpanKwargs] - ): - # type: (...) -> None - - super().__init__(**kwargs) - - self.name = name - self.source = source - self.sample_rate = None # type: Optional[float] - self.parent_sampled = parent_sampled - self._measurements = {} # type: Dict[str, MeasurementValue] - self._contexts = {} # type: Dict[str, Any] - self._profile = None # type: Optional[Profile] - self._continuous_profile = None # type: Optional[ContinuousProfile] - self._baggage = baggage - - baggage_sample_rand = ( - None if self._baggage is None else self._baggage._sample_rand() - ) - if baggage_sample_rand is not None: - self._sample_rand = baggage_sample_rand - else: - self._sample_rand = _generate_sample_rand(self.trace_id) - - def __repr__(self): - # type: () -> str - return ( - "<%s(name=%r, op=%r, trace_id=%r, span_id=%r, parent_span_id=%r, sampled=%r, source=%r, origin=%r)>" - % ( - self.__class__.__name__, - self.name, - self.op, - self.trace_id, - self.span_id, - self.parent_span_id, - self.sampled, - self.source, - self.origin, - ) - ) - - def _possibly_started(self): - # type: () -> bool - """Returns whether the transaction might have been started. - - If this returns False, we know that the transaction was not started - with sentry_sdk.start_transaction, and therefore the transaction will - be discarded. - """ - - # We must explicitly check self.sampled is False since self.sampled can be None - return self._span_recorder is not None or self.sampled is False - - def __enter__(self): - # type: () -> Transaction - if not self._possibly_started(): - logger.debug( - "Transaction was entered without being started with sentry_sdk.start_transaction." - "The transaction will not be sent to Sentry. To fix, start the transaction by" - "passing it to sentry_sdk.start_transaction." - ) - - super().__enter__() - - if self._profile is not None: - self._profile.__enter__() - - return self - - def __exit__(self, ty, value, tb): - # type: (Optional[Any], Optional[Any], Optional[Any]) -> None - if self._profile is not None: - self._profile.__exit__(ty, value, tb) + def set_attribute(self, key: str, value: Any) -> None: + # otel doesn't support None as values, preferring to not set the key + # at all instead + if value is None: + return + serialized_value = _serialize_span_attribute(value) + if serialized_value is None: + return - if self._continuous_profile is not None: - self._continuous_profile.stop() + self._otel_span.set_attribute(key, serialized_value) - super().__exit__(ty, value, tb) + def set_attributes(self, attributes: dict[str, Any]) -> None: + for key, value in attributes.items(): + self.set_attribute(key, value) @property - def containing_transaction(self): - # type: () -> Transaction - """The root element of the span tree. - In the case of a transaction it is the transaction itself. + def status(self) -> Optional[str]: """ - - # Transactions (as spans) belong to themselves (as transactions). 
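`set_attribute`/`set_attributes` above silently drop `None` (OTel attributes cannot be `None`) and anything `_serialize_span_attribute` cannot handle. A sketch of the resulting semantics; the assumption that primitive values round-trip unchanged through serialization is mine, not stated in this diff:

    def annotate(span, user_email=None):
        span.set_attribute("user.email", user_email)             # no-op when None
        span.set_attributes({"retries": 3, "cache_hit": None})   # "cache_hit" dropped
        return span.get_attribute("retries")  # 3; None if the span is not readable
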
This - # is a getter rather than a regular attribute to avoid having a circular - # reference. - return self - - def _get_scope_from_finish_args( - self, - scope_arg, # type: Optional[Union[sentry_sdk.Scope, sentry_sdk.Hub]] - hub_arg, # type: Optional[Union[sentry_sdk.Scope, sentry_sdk.Hub]] - ): - # type: (...) -> Optional[sentry_sdk.Scope] - """ - Logic to get the scope from the arguments passed to finish. This - function exists for backwards compatibility with the old finish. - - TODO: Remove this function in the next major version. - """ - scope_or_hub = scope_arg - if hub_arg is not None: - warnings.warn( - "The `hub` parameter is deprecated. Please use the `scope` parameter, instead.", - DeprecationWarning, - stacklevel=3, - ) - - scope_or_hub = hub_arg - - if isinstance(scope_or_hub, sentry_sdk.Hub): - warnings.warn( - "Passing a Hub to finish is deprecated. Please pass a Scope, instead.", - DeprecationWarning, - stacklevel=3, - ) - - return scope_or_hub.scope - - return scope_or_hub - - def finish( - self, - scope=None, # type: Optional[sentry_sdk.Scope] - end_timestamp=None, # type: Optional[Union[float, datetime]] - *, - hub=None, # type: Optional[sentry_sdk.Hub] - ): - # type: (...) -> Optional[str] - """Finishes the transaction and sends it to Sentry. - All finished spans in the transaction will also be sent to Sentry. - - :param scope: The Scope to use for this transaction. - If not provided, the current Scope will be used. - :param end_timestamp: Optional timestamp that should - be used as timestamp instead of the current time. - :param hub: The hub to use for this transaction. - This argument is DEPRECATED. Please use the `scope` - parameter, instead. - - :return: The event ID if the transaction was sent to Sentry, - otherwise None. + Return the Sentry `SPANSTATUS` corresponding to the underlying OTel status. + Because differences in possible values in OTel `StatusCode` and + Sentry `SPANSTATUS` it can not be guaranteed that the status + set in `set_status()` will be the same as the one returned here. """ - if self.timestamp is not None: - # This transaction is already finished, ignore. - return None - - # For backwards compatibility, we must handle the case where `scope` - # or `hub` could both either be a `Scope` or a `Hub`. - scope = self._get_scope_from_finish_args( - scope, hub - ) # type: Optional[sentry_sdk.Scope] - - scope = scope or self.scope or sentry_sdk.get_current_scope() - client = sentry_sdk.get_client() - - if not client.is_active(): - # We have no active client and therefore nowhere to send this transaction. + if not isinstance(self._otel_span, ReadableSpan): return None - if self._span_recorder is None: - # Explicit check against False needed because self.sampled might be None - if self.sampled is False: - logger.debug("Discarding transaction because sampled = False") - else: - logger.debug( - "Discarding transaction because it was not started with sentry_sdk.start_transaction" - ) - - # This is not entirely accurate because discards here are not - # exclusively based on sample rate but also traces sampler, but - # we handle this the same here. - if client.transport and has_tracing_enabled(client.options): - if client.monitor and client.monitor.downsample_factor > 0: - reason = "backpressure" - else: - reason = "sample_rate" - - client.transport.record_lost_event(reason, data_category="transaction") - - # Only one span (the transaction itself) is discarded, since we did not record any spans here. 
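The `status` property above collapses OTel's `StatusCode` back to Sentry statuses, so only `ok` round-trips exactly. Illustrated with a recording `span`:

    from sentry_sdk.consts import SPANSTATUS

    span.set_status(SPANSTATUS.OK)                  # StatusCode.OK, no description
    assert span.status == SPANSTATUS.OK

    span.set_status(SPANSTATUS.NOT_FOUND)           # StatusCode.ERROR, status as description
    assert span.status == SPANSTATUS.UNKNOWN_ERROR  # fine-grained value is not read back
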
-                client.transport.record_lost_event(reason, data_category="span")
-            return None
-
-        if not self.name:
-            logger.warning(
-                "Transaction has no name, falling back to `<unlabeled transaction>`."
-            )
-            self.name = "<unlabeled transaction>"
-
-        super().finish(scope, end_timestamp)
-
-        if not self.sampled:
-            # At this point a `sampled = None` should have already been resolved
-            # to a concrete decision.
-            if self.sampled is None:
-                logger.warning("Discarding transaction without sampling decision.")
-
+        if self._otel_span.status.status_code == StatusCode.UNSET:
             return None
+        elif self._otel_span.status.status_code == StatusCode.OK:
+            return SPANSTATUS.OK
+        else:
+            return SPANSTATUS.UNKNOWN_ERROR
-
-        finished_spans = [
-            span.to_json()
-            for span in self._span_recorder.spans
-            if span.timestamp is not None
-        ]
-
-        len_diff = len(self._span_recorder.spans) - len(finished_spans)
-        dropped_spans = len_diff + self._span_recorder.dropped_spans
-
-        # we do this to break the circular reference of transaction -> span
-        # recorder -> span -> containing transaction (which is where we started)
-        # before either the spans or the transaction goes out of scope and has
-        # to be garbage collected
-        self._span_recorder = None
-
-        contexts = {}
-        contexts.update(self._contexts)
-        contexts.update({"trace": self.get_trace_context()})
-        profile_context = self.get_profile_context()
-        if profile_context is not None:
-            contexts.update({"profile": profile_context})
-
-        event = {
-            "type": "transaction",
-            "transaction": self.name,
-            "transaction_info": {"source": self.source},
-            "contexts": contexts,
-            "tags": self._tags,
-            "timestamp": self.timestamp,
-            "start_timestamp": self.start_timestamp,
-            "spans": finished_spans,
-        }  # type: Event
-
-        if dropped_spans > 0:
-            event["_dropped_spans"] = dropped_spans
-
-        if self._profile is not None and self._profile.valid():
-            event["profile"] = self._profile
-            self._profile = None
-
-        event["measurements"] = self._measurements
-
-        # This is here since `to_json` is not invoked. This really should
-        # be gone when we switch to onlyspans.
-        if self._local_aggregator is not None:
-            metrics_summary = self._local_aggregator.to_json()
-            if metrics_summary:
-                event["_metrics_summary"] = metrics_summary
-
-        return scope.capture_event(event)
-
-    def set_measurement(self, name, value, unit=""):
-        # type: (str, float, MeasurementUnit) -> None
-        """
-        .. deprecated:: 2.28.0
-            This function is deprecated and will be removed in the next major release.
-        """
-
-        warnings.warn(
-            "`set_measurement()` is deprecated and will be removed in the next major version. Please use `set_data()` instead.",
-            DeprecationWarning,
-            stacklevel=2,
-        )
-        self._measurements[name] = {"value": value, "unit": unit}
-
-    def set_context(self, key, value):
-        # type: (str, dict[str, Any]) -> None
-        """Sets a context. Transactions can have multiple contexts
-        and they should follow the format described in the "Contexts Interface"
-        documentation.
-
-        :param key: The name of the context.
-        :param value: The information about the context.
-        """
-        self._contexts[key] = value
-
-    def set_http_status(self, http_status):
-        # type: (int) -> None
-        """Sets the status of the Transaction according to the given HTTP status.
- - :param http_status: The HTTP status code.""" - super().set_http_status(http_status) - self.set_context("response", {"status_code": http_status}) - - def to_json(self): - # type: () -> Dict[str, Any] - """Returns a JSON-compatible representation of the transaction.""" - rv = super().to_json() - - rv["name"] = self.name - rv["source"] = self.source - rv["sampled"] = self.sampled + def set_status(self, status: str) -> None: + if status == SPANSTATUS.OK: + otel_status = StatusCode.OK + otel_description = None + else: + otel_status = StatusCode.ERROR + otel_description = status - return rv + if _OTEL_VERSION is None or _OTEL_VERSION >= (1, 12, 0): + self._otel_span.set_status(otel_status, otel_description) + else: + self._otel_span.set_status(Status(otel_status, otel_description)) - def get_trace_context(self): - # type: () -> Any - trace_context = super().get_trace_context() + def set_thread(self, thread_id: Optional[int], thread_name: Optional[str]) -> None: + if thread_id is not None: + self.set_attribute(SPANDATA.THREAD_ID, str(thread_id)) - if self._data: - trace_context["data"] = self._data + if thread_name is not None: + self.set_attribute(SPANDATA.THREAD_NAME, thread_name) - return trace_context + def update_active_thread(self) -> None: + thread_id, thread_name = get_current_thread_meta() + self.set_thread(thread_id, thread_name) - def get_baggage(self): - # type: () -> Baggage - """Returns the :py:class:`~sentry_sdk.tracing_utils.Baggage` - associated with the Transaction. + def set_http_status(self, http_status: int) -> None: + self.set_attribute(SPANDATA.HTTP_STATUS_CODE, http_status) + self.set_status(get_span_status_from_http_code(http_status)) - The first time a new baggage with Sentry items is made, - it will be frozen.""" - if not self._baggage or self._baggage.mutable: - self._baggage = Baggage.populate_from_transaction(self) + def is_success(self) -> bool: + return self.status == SPANSTATUS.OK - return self._baggage + def finish(self, end_timestamp: Optional[Union[float, datetime]] = None) -> None: + if end_timestamp is not None: + self._otel_span.end(convert_to_otel_timestamp(end_timestamp)) + else: + self._otel_span.end() - def _set_initial_sampling_decision(self, sampling_context): - # type: (SamplingContext) -> None + def to_json(self) -> dict[str, Any]: """ - Sets the transaction's sampling decision, according to the following - precedence rules: - - 1. If a sampling decision is passed to `start_transaction` - (`start_transaction(name: "my transaction", sampled: True)`), that - decision will be used, regardless of anything else - - 2. If `traces_sampler` is defined, its decision will be used. It can - choose to keep or ignore any parent sampling decision, or use the - sampling context data to make its own decision or to choose a sample - rate for the transaction. - - 3. If `traces_sampler` is not defined, but there's a parent sampling - decision, the parent sampling decision will be used. - - 4. If `traces_sampler` is not defined and there's no parent sampling - decision, `traces_sample_rate` will be used. + Only meant for testing. Not used internally anymore. 
""" - client = sentry_sdk.get_client() - - transaction_description = "{op}transaction <{name}>".format( - op=("<" + self.op + "> " if self.op else ""), name=self.name - ) - - # nothing to do if tracing is disabled - if not has_tracing_enabled(client.options): - self.sampled = False - return - - # if the user has forced a sampling decision by passing a `sampled` - # value when starting the transaction, go with that - if self.sampled is not None: - self.sample_rate = float(self.sampled) - return - - # we would have bailed already if neither `traces_sampler` nor - # `traces_sample_rate` were defined, so one of these should work; prefer - # the hook if so - sample_rate = ( - client.options["traces_sampler"](sampling_context) - if callable(client.options.get("traces_sampler")) - else ( - # default inheritance behavior - sampling_context["parent_sampled"] - if sampling_context["parent_sampled"] is not None - else client.options["traces_sample_rate"] - ) - ) - - # Since this is coming from the user (or from a function provided by the - # user), who knows what we might get. (The only valid values are - # booleans or numbers between 0 and 1.) - if not is_valid_sample_rate(sample_rate, source="Tracing"): - logger.warning( - "[Tracing] Discarding {transaction_description} because of invalid sample rate.".format( - transaction_description=transaction_description, - ) - ) - self.sampled = False - return - - self.sample_rate = float(sample_rate) - - if client.monitor: - self.sample_rate /= 2**client.monitor.downsample_factor - - # if the function returned 0 (or false), or if `traces_sample_rate` is - # 0, it's a sign the transaction should be dropped - if not self.sample_rate: - logger.debug( - "[Tracing] Discarding {transaction_description} because {reason}".format( - transaction_description=transaction_description, - reason=( - "traces_sampler returned 0 or False" - if callable(client.options.get("traces_sampler")) - else "traces_sample_rate is set to 0" - ), - ) - ) - self.sampled = False - return - - # Now we roll the dice. 
- self.sampled = self._sample_rand < Decimal.from_float(self.sample_rate) - - if self.sampled: - logger.debug( - "[Tracing] Starting {transaction_description}".format( - transaction_description=transaction_description, - ) - ) - else: - logger.debug( - "[Tracing] Discarding {transaction_description} because it's not included in the random sample (sampling rate = {sample_rate})".format( - transaction_description=transaction_description, - sample_rate=self.sample_rate, - ) - ) + if not isinstance(self._otel_span, ReadableSpan): + return {} + return json.loads(self._otel_span.to_json()) + def get_trace_context(self) -> dict[str, Any]: + if not isinstance(self._otel_span, ReadableSpan): + return {} -class NoOpSpan(Span): - def __repr__(self): - # type: () -> str - return "<%s>" % self.__class__.__name__ + return get_trace_context(self._otel_span) - @property - def containing_transaction(self): - # type: () -> Optional[Transaction] - return None + def set_context(self, key: str, value: Any) -> None: + # TODO-neel-potel we cannot add dicts here - def start_child(self, instrumenter=INSTRUMENTER.SENTRY, **kwargs): - # type: (str, **Any) -> NoOpSpan - return NoOpSpan() + self.set_attribute(f"{SentrySpanAttribute.CONTEXT}.{key}", value) - def to_traceparent(self): - # type: () -> str - return "" + def set_flag(self, flag: str, value: bool) -> None: + flag_count = self.get_attribute("_flag.count") or 0 + if flag_count < _FLAGS_CAPACITY: + self.set_attribute(f"flag.evaluation.{flag}", value) + self.set_attribute("_flag.count", flag_count + 1) - def to_baggage(self): - # type: () -> Optional[Baggage] - return None - def get_baggage(self): - # type: () -> Optional[Baggage] - return None - - def iter_headers(self): - # type: () -> Iterator[Tuple[str, str]] - return iter(()) - - def set_tag(self, key, value): - # type: (str, Any) -> None - pass - - def set_data(self, key, value): - # type: (str, Any) -> None - pass - - def update_data(self, data): - # type: (Dict[str, Any]) -> None - pass - - def set_status(self, value): - # type: (str) -> None - pass - - def set_http_status(self, http_status): - # type: (int) -> None - pass - - def is_success(self): - # type: () -> bool - return True - - def to_json(self): - # type: () -> Dict[str, Any] - return {} - - def get_trace_context(self): - # type: () -> Any - return {} - - def get_profile_context(self): - # type: () -> Any - return {} - - def finish( - self, - scope=None, # type: Optional[sentry_sdk.Scope] - end_timestamp=None, # type: Optional[Union[float, datetime]] - *, - hub=None, # type: Optional[sentry_sdk.Hub] - ): - # type: (...) -> Optional[str] - """ - The `hub` parameter is deprecated. Please use the `scope` parameter, instead. 
- """ - pass - - def set_measurement(self, name, value, unit=""): - # type: (str, float, MeasurementUnit) -> None - pass - - def set_context(self, key, value): - # type: (str, dict[str, Any]) -> None - pass - - def init_span_recorder(self, maxlen): - # type: (int) -> None - pass - - def _set_initial_sampling_decision(self, sampling_context): - # type: (SamplingContext) -> None - pass +# TODO-neel-potel add deprecation +Transaction = Span if TYPE_CHECKING: @overload - def trace(func=None): - # type: (None) -> Callable[[Callable[P, R]], Callable[P, R]] + def trace(func: None = None) -> Callable[[Callable[P, R]], Callable[P, R]]: pass @overload - def trace(func): - # type: (Callable[P, R]) -> Callable[P, R] + def trace(func: Callable[P, R]) -> Callable[P, R]: pass -def trace(func=None): - # type: (Optional[Callable[P, R]]) -> Union[Callable[P, R], Callable[[Callable[P, R]], Callable[P, R]]] +def trace( + func: Optional[Callable[P, R]] = None, +) -> Union[Callable[P, R], Callable[[Callable[P, R]], Callable[P, R]]]: """ Decorator to start a child span under the existing current transaction. If there is no current transaction, then nothing will be traced. @@ -1377,20 +587,3 @@ async def my_async_function(): return start_child_span_decorator(func) else: return start_child_span_decorator - - -# Circular imports - -from sentry_sdk.tracing_utils import ( - Baggage, - EnvironHeaders, - extract_sentrytrace_data, - _generate_sample_rand, - has_tracing_enabled, - maybe_create_breadcrumbs_from_span, -) - -with warnings.catch_warnings(): - # The code in this file which uses `LocalAggregator` is only called from the deprecated `metrics` module. - warnings.simplefilter("ignore", DeprecationWarning) - from sentry_sdk.metrics import LocalAggregator diff --git a/sentry_sdk/tracing_utils.py b/sentry_sdk/tracing_utils.py index 552f4fd59a..473e6d5e91 100644 --- a/sentry_sdk/tracing_utils.py +++ b/sentry_sdk/tracing_utils.py @@ -1,18 +1,25 @@ +from __future__ import annotations import contextlib import inspect import os import re import sys +import uuid from collections.abc import Mapping -from datetime import timedelta +from datetime import datetime, timedelta, timezone from decimal import ROUND_DOWN, Decimal, DefaultContext, localcontext from functools import wraps from random import Random from urllib.parse import quote, unquote -import uuid import sentry_sdk -from sentry_sdk.consts import OP, SPANDATA +from sentry_sdk.consts import ( + OP, + SPANDATA, + SPANSTATUS, + BAGGAGE_HEADER_NAME, + SENTRY_TRACE_HEADER_NAME, +) from sentry_sdk.utils import ( capture_internal_exceptions, filename_for_module, @@ -21,7 +28,6 @@ match_regex_list, qualname_from_function, to_string, - try_convert, is_sentry_url, _is_external_source, _is_in_project_root, @@ -31,13 +37,8 @@ from typing import TYPE_CHECKING if TYPE_CHECKING: - from typing import Any - from typing import Dict - from typing import Generator - from typing import Optional - from typing import Union - from types import FrameType + from typing import Any, Dict, Generator, Optional, Union SENTRY_TRACE_REGEX = re.compile( @@ -64,23 +65,19 @@ class EnvironHeaders(Mapping): # type: ignore def __init__( self, - environ, # type: Mapping[str, str] - prefix="HTTP_", # type: str - ): - # type: (...) 
-> None + environ: Mapping[str, str], + prefix: str = "HTTP_", + ) -> None: self.environ = environ self.prefix = prefix - def __getitem__(self, key): - # type: (str) -> Optional[Any] + def __getitem__(self, key: str) -> Optional[Any]: return self.environ[self.prefix + key.replace("-", "_").upper()] - def __len__(self): - # type: () -> int + def __len__(self) -> int: return sum(1 for _ in iter(self)) - def __iter__(self): - # type: () -> Generator[str, None, None] + def __iter__(self) -> Generator[str, None, None]: for k in self.environ: if not isinstance(k, str): continue @@ -92,36 +89,30 @@ def __iter__(self): yield k[len(self.prefix) :] -def has_tracing_enabled(options): - # type: (Optional[Dict[str, Any]]) -> bool +def has_tracing_enabled(options: dict[str, Any]) -> bool: """ Returns True if either traces_sample_rate or traces_sampler is - defined and enable_tracing is set and not false. + defined. """ if options is None: return False return bool( - options.get("enable_tracing") is not False - and ( - options.get("traces_sample_rate") is not None - or options.get("traces_sampler") is not None - ) + options.get("traces_sample_rate") is not None + or options.get("traces_sampler") is not None ) @contextlib.contextmanager def record_sql_queries( - cursor, # type: Any - query, # type: Any - params_list, # type: Any - paramstyle, # type: Optional[str] - executemany, # type: bool - record_cursor_repr=False, # type: bool - span_origin="manual", # type: str -): - # type: (...) -> Generator[sentry_sdk.tracing.Span, None, None] - + cursor: Any, + query: Any, + params_list: Any, + paramstyle: Optional[str], + executemany: bool, + record_cursor_repr: bool = False, + span_origin: Optional[str] = None, +) -> Generator[sentry_sdk.tracing.Span, None, None]: # TODO: Bring back capturing of params by default if sentry_sdk.get_client().options["_experiments"].get("record_sql_params", False): if not params_list or params_list == [None]: @@ -152,46 +143,14 @@ def record_sql_queries( op=OP.DB, name=query, origin=span_origin, + only_as_child_span=True, ) as span: for k, v in data.items(): - span.set_data(k, v) + span.set_attribute(k, v) yield span -def maybe_create_breadcrumbs_from_span(scope, span): - # type: (sentry_sdk.Scope, sentry_sdk.tracing.Span) -> None - if span.op == OP.DB_REDIS: - scope.add_breadcrumb( - message=span.description, type="redis", category="redis", data=span._tags - ) - - elif span.op == OP.HTTP_CLIENT: - level = None - status_code = span._data.get(SPANDATA.HTTP_STATUS_CODE) - if status_code: - if 500 <= status_code <= 599: - level = "error" - elif 400 <= status_code <= 499: - level = "warning" - - if level: - scope.add_breadcrumb( - type="http", category="httplib", data=span._data, level=level - ) - else: - scope.add_breadcrumb(type="http", category="httplib", data=span._data) - - elif span.op == "subprocess": - scope.add_breadcrumb( - type="subprocess", - category="subprocess", - message=span.description, - data=span._data, - ) - - -def _get_frame_module_abs_path(frame): - # type: (FrameType) -> Optional[str] +def _get_frame_module_abs_path(frame: FrameType) -> Optional[str]: try: return frame.f_code.co_filename except Exception: @@ -199,14 +158,13 @@ def _get_frame_module_abs_path(frame): def _should_be_included( - is_sentry_sdk_frame, # type: bool - namespace, # type: Optional[str] - in_app_include, # type: Optional[list[str]] - in_app_exclude, # type: Optional[list[str]] - abs_path, # type: Optional[str] - project_root, # type: Optional[str] -): - # type: (...) 
-> bool
+    is_sentry_sdk_frame: bool,
+    namespace: Optional[str],
+    in_app_include: Optional[list[str]],
+    in_app_exclude: Optional[list[str]],
+    abs_path: Optional[str],
+    project_root: Optional[str],
+) -> bool:
     # in_app_include takes precedence over in_app_exclude
     should_be_included = _module_in_list(namespace, in_app_include)
     should_be_excluded = _is_external_source(abs_path) or _module_in_list(
@@ -218,8 +176,7 @@
     )


-def add_query_source(span):
-    # type: (sentry_sdk.tracing.Span) -> None
+def add_query_source(span: sentry_sdk.tracing.Span) -> None:
     """
     Adds OTel compatible source code information to the span
     """
@@ -227,14 +184,17 @@
     if not client.is_active():
         return

-    if span.timestamp is None or span.start_timestamp is None:
+    if span.start_timestamp is None:
        return

     should_add_query_source = client.options.get("enable_db_query_source", True)
     if not should_add_query_source:
         return

-    duration = span.timestamp - span.start_timestamp
+    # We assume here that the query is just ending now. We can't use the span's
+    # actual end timestamp because the span must still be unfinished at this
+    # point: in OTel, attributes can no longer be set once a span has ended.
+    duration = datetime.now(tz=timezone.utc) - span.start_timestamp
     threshold = client.options.get("db_query_source_threshold_ms", 0)
     slow_query = duration / timedelta(milliseconds=1) > threshold

@@ -246,12 +206,12 @@
     in_app_exclude = client.options.get("in_app_exclude")

     # Find the correct frame
-    frame = sys._getframe()  # type: Union[FrameType, None]
+    frame: Optional[FrameType] = sys._getframe()
     while frame is not None:
         abs_path = _get_frame_module_abs_path(frame)

         try:
-            namespace = frame.f_globals.get("__name__")  # type: Optional[str]
+            namespace: Optional[str] = frame.f_globals.get("__name__")
         except Exception:
             namespace = None

@@ -281,14 +241,14 @@
             except Exception:
                 lineno = None
             if lineno is not None:
-                span.set_data(SPANDATA.CODE_LINENO, frame.f_lineno)
+                span.set_attribute(SPANDATA.CODE_LINENO, frame.f_lineno)

             try:
                 namespace = frame.f_globals.get("__name__")
             except Exception:
                 namespace = None
             if namespace is not None:
-                span.set_data(SPANDATA.CODE_NAMESPACE, namespace)
+                span.set_attribute(SPANDATA.CODE_NAMESPACE, namespace)

             filepath = _get_frame_module_abs_path(frame)
             if filepath is not None:
@@ -298,7 +258,7 @@
                     in_app_path = filepath.replace(project_root, "").lstrip(os.sep)
                 else:
                     in_app_path = filepath
-                span.set_data(SPANDATA.CODE_FILEPATH, in_app_path)
+                span.set_attribute(SPANDATA.CODE_FILEPATH, in_app_path)

             try:
                 code_function = frame.f_code.co_name
@@ -306,11 +266,12 @@
                 code_function = None

             if code_function is not None:
-                span.set_data(SPANDATA.CODE_FUNCTION, frame.f_code.co_name)
+                span.set_attribute(SPANDATA.CODE_FUNCTION, frame.f_code.co_name)


-def extract_sentrytrace_data(header):
-    # type: (Optional[str]) -> Optional[Dict[str, Union[str, bool, None]]]
+def extract_sentrytrace_data(
+    header: Optional[str],
+) -> Optional[Dict[str, Union[str, bool, None]]]:
     """
     Given a `sentry-trace` header string, return a dictionary of data.
""" @@ -341,8 +302,7 @@ def extract_sentrytrace_data(header): } -def _format_sql(cursor, sql): - # type: (Any, str) -> Optional[str] +def _format_sql(cursor: Any, sql: str) -> Optional[str]: real_sql = None @@ -371,18 +331,17 @@ class PropagationContext: "_span_id", "parent_span_id", "parent_sampled", - "dynamic_sampling_context", + "baggage", ) def __init__( self, - trace_id=None, # type: Optional[str] - span_id=None, # type: Optional[str] - parent_span_id=None, # type: Optional[str] - parent_sampled=None, # type: Optional[bool] - dynamic_sampling_context=None, # type: Optional[Dict[str, str]] - ): - # type: (...) -> None + trace_id: Optional[str] = None, + span_id: Optional[str] = None, + parent_span_id: Optional[str] = None, + parent_sampled: Optional[bool] = None, + baggage: Optional[Baggage] = None, + ) -> None: self._trace_id = trace_id """The trace id of the Sentry trace.""" @@ -398,21 +357,24 @@ def __init__( Important when the parent span originated in an upstream service, because we want to sample the whole trace, or nothing from the trace.""" - self.dynamic_sampling_context = dynamic_sampling_context - """Data that is used for dynamic sampling decisions.""" + self.baggage = baggage + """Baggage object used for dynamic sampling decisions.""" + + @property + def dynamic_sampling_context(self) -> Optional[Dict[str, str]]: + return self.baggage.dynamic_sampling_context() if self.baggage else None @classmethod - def from_incoming_data(cls, incoming_data): - # type: (Dict[str, Any]) -> Optional[PropagationContext] + def from_incoming_data( + cls, incoming_data: Dict[str, Any] + ) -> Optional[PropagationContext]: propagation_context = None normalized_data = normalize_incoming_data(incoming_data) baggage_header = normalized_data.get(BAGGAGE_HEADER_NAME) if baggage_header: propagation_context = PropagationContext() - propagation_context.dynamic_sampling_context = Baggage.from_incoming_header( - baggage_header - ).dynamic_sampling_context() + propagation_context.baggage = Baggage.from_incoming_header(baggage_header) sentry_trace_header = normalized_data.get(SENTRY_TRACE_HEADER_NAME) if sentry_trace_header: @@ -428,23 +390,19 @@ def from_incoming_data(cls, incoming_data): return propagation_context @property - def trace_id(self): - # type: () -> str + def trace_id(self) -> str: """The trace id of the Sentry trace.""" if not self._trace_id: - # New trace, don't fill in sample_rand self._trace_id = uuid.uuid4().hex return self._trace_id @trace_id.setter - def trace_id(self, value): - # type: (str) -> None + def trace_id(self, value: str) -> None: self._trace_id = value @property - def span_id(self): - # type: () -> str + def span_id(self) -> str: """The span id of the currently executed span.""" if not self._span_id: self._span_id = uuid.uuid4().hex[16:] @@ -452,12 +410,24 @@ def span_id(self): return self._span_id @span_id.setter - def span_id(self, value): - # type: (str) -> None + def span_id(self, value: str) -> None: self._span_id = value - def update(self, other_dict): - # type: (Dict[str, Any]) -> None + def to_traceparent(self) -> str: + if self.parent_sampled is True: + sampled = "1" + elif self.parent_sampled is False: + sampled = "0" + else: + sampled = None + + traceparent = "%s-%s" % (self.trace_id, self.span_id) + if sampled is not None: + traceparent += "-%s" % (sampled,) + + return traceparent + + def update(self, other_dict: Dict[str, Any]) -> None: """ Updates the PropagationContext with data from the given dictionary. 
""" @@ -467,22 +437,11 @@ def update(self, other_dict): except AttributeError: pass - def __repr__(self): - # type: (...) -> str - return "".format( - self._trace_id, - self._span_id, - self.parent_span_id, - self.parent_sampled, - self.dynamic_sampling_context, - ) - - def _fill_sample_rand(self): - # type: () -> None + def _fill_sample_rand(self) -> None: """ - Ensure that there is a valid sample_rand value in the dynamic_sampling_context. + Ensure that there is a valid sample_rand value in the baggage. - If there is a valid sample_rand value in the dynamic_sampling_context, we keep it. + If there is a valid sample_rand value in the baggage, we keep it. Otherwise, we generate a sample_rand value according to the following: - If we have a parent_sampled value and a sample_rate in the DSC, we compute @@ -497,21 +456,33 @@ def _fill_sample_rand(self): This function does nothing if there is no dynamic_sampling_context. """ - if self.dynamic_sampling_context is None: + if self.dynamic_sampling_context is None or self.baggage is None: return - sample_rand = try_convert( - Decimal, self.dynamic_sampling_context.get("sample_rand") - ) + sentry_baggage = self.baggage.sentry_items + + sample_rand = None + if sentry_baggage.get("sample_rand"): + try: + sample_rand = Decimal(sentry_baggage["sample_rand"]) + except Exception: + logger.debug( + f"Failed to convert incoming sample_rand to Decimal: {sample_rand}" + ) + if sample_rand is not None and 0 <= sample_rand < 1: # sample_rand is present and valid, so don't overwrite it return - # Get the sample rate and compute the transformation that will map the random value - # to the desired range: [0, 1), [0, sample_rate), or [sample_rate, 1). - sample_rate = try_convert( - float, self.dynamic_sampling_context.get("sample_rate") - ) + sample_rate = None + if sentry_baggage.get("sample_rate"): + try: + sample_rate = float(sentry_baggage["sample_rate"]) + except Exception: + logger.debug( + f"Failed to convert incoming sample_rate to float: {sample_rate}" + ) + lower, upper = _sample_rand_range(self.parent_sampled, sample_rate) try: @@ -527,17 +498,24 @@ def _fill_sample_rand(self): ) return - self.dynamic_sampling_context["sample_rand"] = ( - f"{sample_rand:.6f}" # noqa: E231 - ) + self.baggage.sentry_items["sample_rand"] = f"{sample_rand:.6f}" # noqa: E231 - def _sample_rand(self): - # type: () -> Optional[str] - """Convenience method to get the sample_rand value from the dynamic_sampling_context.""" - if self.dynamic_sampling_context is None: + def _sample_rand(self) -> Optional[str]: + """Convenience method to get the sample_rand value from the baggage.""" + if self.baggage is None: return None - return self.dynamic_sampling_context.get("sample_rand") + return self.baggage.sentry_items.get("sample_rand") + + def __repr__(self) -> str: + return "".format( + self._trace_id, + self._span_id, + self.parent_span_id, + self.parent_sampled, + self.baggage, + self.dynamic_sampling_context, + ) class Baggage: @@ -556,10 +534,10 @@ class Baggage: def __init__( self, - sentry_items, # type: Dict[str, str] - third_party_items="", # type: str - mutable=True, # type: bool - ): + sentry_items: Dict[str, str], + third_party_items: str = "", + mutable: bool = True, + ) -> None: self.sentry_items = sentry_items self.third_party_items = third_party_items self.mutable = mutable @@ -567,11 +545,8 @@ def __init__( @classmethod def from_incoming_header( cls, - header, # type: Optional[str] - *, - _sample_rand=None, # type: Optional[str] - ): - # type: (...) 
-> Baggage + header: Optional[str], + ) -> Baggage: """ freeze if incoming header already has sentry baggage """ @@ -594,17 +569,11 @@ def from_incoming_header( else: third_party_items += ("," if third_party_items else "") + item - if _sample_rand is not None: - sentry_items["sample_rand"] = str(_sample_rand) - mutable = False - return Baggage(sentry_items, third_party_items, mutable) @classmethod - def from_options(cls, scope): - # type: (sentry_sdk.scope.Scope) -> Optional[Baggage] - - sentry_items = {} # type: Dict[str, str] + def from_options(cls, scope: sentry_sdk.scope.Scope) -> Optional[Baggage]: + sentry_items: Dict[str, str] = {} third_party_items = "" mutable = False @@ -633,59 +602,10 @@ def from_options(cls, scope): return Baggage(sentry_items, third_party_items, mutable) - @classmethod - def populate_from_transaction(cls, transaction): - # type: (sentry_sdk.tracing.Transaction) -> Baggage - """ - Populate fresh baggage entry with sentry_items and make it immutable - if this is the head SDK which originates traces. - """ - client = sentry_sdk.get_client() - sentry_items = {} # type: Dict[str, str] - - if not client.is_active(): - return Baggage(sentry_items) - - options = client.options or {} - - sentry_items["trace_id"] = transaction.trace_id - sentry_items["sample_rand"] = str(transaction._sample_rand) - - if options.get("environment"): - sentry_items["environment"] = options["environment"] - - if options.get("release"): - sentry_items["release"] = options["release"] - - if options.get("dsn"): - sentry_items["public_key"] = Dsn(options["dsn"]).public_key - - if ( - transaction.name - and transaction.source not in LOW_QUALITY_TRANSACTION_SOURCES - ): - sentry_items["transaction"] = transaction.name - - if transaction.sample_rate is not None: - sentry_items["sample_rate"] = str(transaction.sample_rate) - - if transaction.sampled is not None: - sentry_items["sampled"] = "true" if transaction.sampled else "false" - - # there's an existing baggage but it was mutable, - # which is why we are creating this new baggage. - # However, if by chance the user put some sentry items in there, give them precedence. - if transaction._baggage and transaction._baggage.sentry_items: - sentry_items.update(transaction._baggage.sentry_items) - - return Baggage(sentry_items, mutable=False) - - def freeze(self): - # type: () -> None + def freeze(self) -> None: self.mutable = False - def dynamic_sampling_context(self): - # type: () -> Dict[str, str] + def dynamic_sampling_context(self) -> Dict[str, str]: header = {} for key, item in self.sentry_items.items(): @@ -693,8 +613,7 @@ def dynamic_sampling_context(self): return header - def serialize(self, include_third_party=False): - # type: (bool) -> str + def serialize(self, include_third_party: bool = False) -> str: items = [] for key, val in self.sentry_items.items(): @@ -708,8 +627,7 @@ def serialize(self, include_third_party=False): return ",".join(items) @staticmethod - def strip_sentry_baggage(header): - # type: (str) -> str + def strip_sentry_baggage(header: str) -> str: """Remove Sentry baggage from the given header. Given a Baggage header, return a new Baggage header with all Sentry baggage items removed. @@ -722,27 +640,11 @@ def strip_sentry_baggage(header): ) ) - def _sample_rand(self): - # type: () -> Optional[Decimal] - """Convenience method to get the sample_rand value from the sentry_items. - - We validate the value and parse it as a Decimal before returning it. The value is considered - valid if it is a Decimal in the range [0, 1). 
- """ - sample_rand = try_convert(Decimal, self.sentry_items.get("sample_rand")) - - if sample_rand is not None and Decimal(0) <= sample_rand < Decimal(1): - return sample_rand - - return None - - def __repr__(self): - # type: () -> str + def __repr__(self) -> str: return f'' -def should_propagate_trace(client, url): - # type: (sentry_sdk.client.BaseClient, str) -> bool +def should_propagate_trace(client: sentry_sdk.client.BaseClient, url: str) -> bool: """ Returns True if url matches trace_propagation_targets configured in the given client. Otherwise, returns False. """ @@ -754,8 +656,22 @@ def should_propagate_trace(client, url): return match_regex_list(url, trace_propagation_targets, substring_matching=True) -def normalize_incoming_data(incoming_data): - # type: (Dict[str, Any]) -> Dict[str, Any] +def _is_span_origin_excluded(origin: Optional[str]) -> bool: + """ + Check if spans with this origin should be ignored based on the `exclude_span_origins` option. + """ + if origin is None: + return False + + client = sentry_sdk.get_client() + exclude_span_origins = client.options.get("exclude_span_origins") + if not exclude_span_origins: + return False + + return match_regex_list(origin, exclude_span_origins, substring_matching=True) + + +def normalize_incoming_data(incoming_data: Dict[str, Any]) -> Dict[str, Any]: """ Normalizes incoming data so the keys are all lowercase with dashes instead of underscores and stripped from known prefixes. """ @@ -770,8 +686,7 @@ def normalize_incoming_data(incoming_data): return data -def start_child_span_decorator(func): - # type: (Any) -> Any +def start_child_span_decorator(func: Any) -> Any: """ Decorator to add child spans for functions. @@ -781,9 +696,7 @@ def start_child_span_decorator(func): if inspect.iscoroutinefunction(func): @wraps(func) - async def func_with_tracing(*args, **kwargs): - # type: (*Any, **Any) -> Any - + async def func_with_tracing(*args: Any, **kwargs: Any) -> Any: span = get_current_span() if span is None: @@ -794,14 +707,17 @@ async def func_with_tracing(*args, **kwargs): ) return await func(*args, **kwargs) - with span.start_child( + with sentry_sdk.start_span( op=OP.FUNCTION, name=qualname_from_function(func), + only_as_child_span=True, ): return await func(*args, **kwargs) try: - func_with_tracing.__signature__ = inspect.signature(func) # type: ignore[attr-defined] + func_with_tracing.__signature__ = inspect.signature( # type: ignore[attr-defined] + func + ) except Exception: pass @@ -809,9 +725,7 @@ async def func_with_tracing(*args, **kwargs): else: @wraps(func) - def func_with_tracing(*args, **kwargs): - # type: (*Any, **Any) -> Any - + def func_with_tracing(*args: Any, **kwargs: Any) -> Any: span = get_current_span() if span is None: @@ -822,22 +736,26 @@ def func_with_tracing(*args, **kwargs): ) return func(*args, **kwargs) - with span.start_child( + with sentry_sdk.start_span( op=OP.FUNCTION, name=qualname_from_function(func), + only_as_child_span=True, ): return func(*args, **kwargs) try: - func_with_tracing.__signature__ = inspect.signature(func) # type: ignore[attr-defined] + func_with_tracing.__signature__ = inspect.signature( # type: ignore[attr-defined] + func + ) except Exception: pass return func_with_tracing -def get_current_span(scope=None): - # type: (Optional[sentry_sdk.Scope]) -> Optional[Span] +def get_current_span( + scope: Optional[sentry_sdk.scope.Scope] = None, +) -> Optional[sentry_sdk.tracing.Span]: """ Returns the currently active span if there is one running, otherwise `None` """ @@ -847,11 +765,9 
@@ def get_current_span(scope=None): def _generate_sample_rand( - trace_id, # type: Optional[str] - *, - interval=(0.0, 1.0), # type: tuple[float, float] -): - # type: (...) -> Decimal + trace_id: Optional[str], + interval: tuple[float, float] = (0.0, 1.0), +) -> Decimal: """Generate a sample_rand value from a trace ID. The generated value will be pseudorandomly chosen from the provided @@ -881,8 +797,9 @@ def _generate_sample_rand( ) -def _sample_rand_range(parent_sampled, sample_rate): - # type: (Optional[bool], Optional[float]) -> tuple[float, float] +def _sample_rand_range( + parent_sampled: Optional[bool], sample_rate: Optional[float] +) -> tuple[float, float]: """ Compute the lower (inclusive) and upper (exclusive) bounds of the range of values that a generated sample_rand value must fall into, given the parent_sampled and @@ -896,12 +813,39 @@ def _sample_rand_range(parent_sampled, sample_rate): return sample_rate, 1.0 -# Circular imports -from sentry_sdk.tracing import ( - BAGGAGE_HEADER_NAME, - LOW_QUALITY_TRANSACTION_SOURCES, - SENTRY_TRACE_HEADER_NAME, -) +def get_span_status_from_http_code(http_status_code: int) -> str: + """ + Returns the Sentry status corresponding to the given HTTP status code. -if TYPE_CHECKING: - from sentry_sdk.tracing import Span + See: https://develop.sentry.dev/sdk/event-payloads/contexts/#trace-context + """ + if http_status_code < 400: + return SPANSTATUS.OK + + elif 400 <= http_status_code < 500: + if http_status_code == 403: + return SPANSTATUS.PERMISSION_DENIED + elif http_status_code == 404: + return SPANSTATUS.NOT_FOUND + elif http_status_code == 429: + return SPANSTATUS.RESOURCE_EXHAUSTED + elif http_status_code == 413: + return SPANSTATUS.FAILED_PRECONDITION + elif http_status_code == 401: + return SPANSTATUS.UNAUTHENTICATED + elif http_status_code == 409: + return SPANSTATUS.ALREADY_EXISTS + else: + return SPANSTATUS.INVALID_ARGUMENT + + elif 500 <= http_status_code < 600: + if http_status_code == 504: + return SPANSTATUS.DEADLINE_EXCEEDED + elif http_status_code == 501: + return SPANSTATUS.UNIMPLEMENTED + elif http_status_code == 503: + return SPANSTATUS.UNAVAILABLE + else: + return SPANSTATUS.INTERNAL_ERROR + + return SPANSTATUS.UNKNOWN_ERROR diff --git a/sentry_sdk/transport.py b/sentry_sdk/transport.py index e904081959..ac7a8c3522 100644 --- a/sentry_sdk/transport.py +++ b/sentry_sdk/transport.py @@ -1,3 +1,4 @@ +from __future__ import annotations from abc import ABC, abstractmethod import io import os @@ -5,7 +6,6 @@ import socket import ssl import time -import warnings from datetime import datetime, timedelta, timezone from collections import defaultdict from urllib.request import getproxies @@ -15,33 +15,43 @@ except ImportError: brotli = None +try: + import httpcore + import h2 # noqa: F401 + + HTTP2_ENABLED = True +except ImportError: + HTTP2_ENABLED = False + import urllib3 import certifi -import sentry_sdk from sentry_sdk.consts import EndpointType from sentry_sdk.utils import Dsn, logger, capture_internal_exceptions from sentry_sdk.worker import BackgroundWorker from sentry_sdk.envelope import Envelope, Item, PayloadRef -from typing import TYPE_CHECKING, cast, List, Dict +from typing import TYPE_CHECKING if TYPE_CHECKING: - from typing import Any - from typing import Callable - from typing import DefaultDict - from typing import Iterable - from typing import Mapping - from typing import Optional - from typing import Self - from typing import Tuple - from typing import Type - from typing import Union - + from typing import ( + 
List, + Dict, + Any, + Callable, + DefaultDict, + Iterable, + Mapping, + Optional, + Tuple, + Type, + Union, + Self, + ) from urllib3.poolmanager import PoolManager from urllib3.poolmanager import ProxyManager - from sentry_sdk._types import Event, EventDataCategory + from sentry_sdk._types import EventDataCategory KEEP_ALIVE_SOCKET_OPTIONS = [] for option in [ @@ -64,38 +74,17 @@ class Transport(ABC): A transport is used to send an event to sentry. """ - parsed_dsn = None # type: Optional[Dsn] + parsed_dsn: Optional[Dsn] = None - def __init__(self, options=None): - # type: (Self, Optional[Dict[str, Any]]) -> None + def __init__(self: Self, options: Optional[Dict[str, Any]] = None) -> None: self.options = options if options and options["dsn"] is not None and options["dsn"]: self.parsed_dsn = Dsn(options["dsn"]) else: self.parsed_dsn = None - def capture_event(self, event): - # type: (Self, Event) -> None - """ - DEPRECATED: Please use capture_envelope instead. - - This gets invoked with the event dictionary when an event should - be sent to sentry. - """ - - warnings.warn( - "capture_event is deprecated, please use capture_envelope instead!", - DeprecationWarning, - stacklevel=2, - ) - - envelope = Envelope() - envelope.add_event(event) - self.capture_envelope(envelope) - @abstractmethod - def capture_envelope(self, envelope): - # type: (Self, Envelope) -> None + def capture_envelope(self: Self, envelope: Envelope) -> None: """ Send an envelope to Sentry. @@ -106,11 +95,10 @@ def capture_envelope(self, envelope): pass def flush( - self, - timeout, - callback=None, - ): - # type: (Self, float, Optional[Any]) -> None + self: Self, + timeout: float, + callback: Optional[Any] = None, + ) -> None: """ Wait `timeout` seconds for the current events to be sent out. @@ -119,8 +107,7 @@ def flush( """ return None - def kill(self): - # type: (Self) -> None + def kill(self: Self) -> None: """ Forcefully kills the transport. @@ -130,14 +117,13 @@ def kill(self): return None def record_lost_event( - self, - reason, # type: str - data_category=None, # type: Optional[EventDataCategory] - item=None, # type: Optional[Item] + self: Self, + reason: str, + data_category: Optional[EventDataCategory] = None, + item: Optional[Item] = None, *, - quantity=1, # type: int - ): - # type: (...) -> None + quantity: int = 1, + ) -> None: """This increments a counter for event loss by reason and data category by the given positive-int quantity (default 1). 
@@ -154,13 +140,13 @@ def record_lost_event( """ return None - def is_healthy(self): - # type: (Self) -> bool + def is_healthy(self: Self) -> bool: return True -def _parse_rate_limits(header, now=None): - # type: (str, Optional[datetime]) -> Iterable[Tuple[Optional[EventDataCategory], datetime]] +def _parse_rate_limits( + header: str, now: Optional[datetime] = None +) -> Iterable[Tuple[Optional[str], datetime]]: if now is None: now = datetime.now(timezone.utc) @@ -171,17 +157,7 @@ def _parse_rate_limits(header, now=None): retry_after = now + timedelta(seconds=int(retry_after_val)) for category in categories and categories.split(";") or (None,): - if category == "metric_bucket": - try: - namespaces = parameters[4].split(";") - except IndexError: - namespaces = [] - - if not namespaces or "custom" in namespaces: - yield category, retry_after # type: ignore - - else: - yield category, retry_after # type: ignore + yield category, retry_after except (LookupError, ValueError): continue @@ -191,28 +167,24 @@ class BaseHttpTransport(Transport): TIMEOUT = 30 # seconds - def __init__(self, options): - # type: (Self, Dict[str, Any]) -> None + def __init__(self: Self, options: Dict[str, Any]) -> None: from sentry_sdk.consts import VERSION Transport.__init__(self, options) assert self.parsed_dsn is not None - self.options = options # type: Dict[str, Any] + self.options: Dict[str, Any] = options self._worker = BackgroundWorker(queue_size=options["transport_queue_size"]) self._auth = self.parsed_dsn.to_auth("sentry.python/%s" % VERSION) - self._disabled_until = {} # type: Dict[Optional[EventDataCategory], datetime] + self._disabled_until: Dict[Optional[str], datetime] = {} # We only use this Retry() class for the `get_retry_after` method it exposes self._retry = urllib3.util.Retry() - self._discarded_events = defaultdict( - int - ) # type: DefaultDict[Tuple[EventDataCategory, str], int] + self._discarded_events: DefaultDict[Tuple[EventDataCategory, str], int] = ( + defaultdict(int) + ) self._last_client_report_sent = time.time() self._pool = self._make_pool() - # Backwards compatibility for deprecated `self.hub_class` attribute - self._hub_cls = sentry_sdk.Hub - experiments = options.get("_experiments", {}) compression_level = experiments.get( "transport_compression_level", @@ -253,14 +225,13 @@ def __init__(self, options): self._compression_level = 4 def record_lost_event( - self, - reason, # type: str - data_category=None, # type: Optional[EventDataCategory] - item=None, # type: Optional[Item] + self: Self, + reason: str, + data_category: Optional[EventDataCategory] = None, + item: Optional[Item] = None, *, - quantity=1, # type: int - ): - # type: (...) 
-> None + quantity: int = 1, + ) -> None: if not self.options["send_client_reports"]: return @@ -273,9 +244,7 @@ def record_lost_event( event = item.get_transaction_event() or {} # +1 for the transaction itself - span_count = ( - len(cast(List[Dict[str, object]], event.get("spans") or [])) + 1 - ) + span_count = len(event.get("spans") or []) + 1 self.record_lost_event(reason, "span", quantity=span_count) elif data_category == "attachment": @@ -288,12 +257,12 @@ def record_lost_event( self._discarded_events[data_category, reason] += quantity - def _get_header_value(self, response, header): - # type: (Self, Any, str) -> Optional[str] + def _get_header_value(self: Self, response: Any, header: str) -> Optional[str]: return response.headers.get(header) - def _update_rate_limits(self, response): - # type: (Self, Union[urllib3.BaseHTTPResponse, httpcore.Response]) -> None + def _update_rate_limits( + self: Self, response: Union[urllib3.BaseHTTPResponse, httpcore.Response] + ) -> None: # new sentries with more rate limit insights. We honor this header # no matter of the status code to update our internal rate limits. @@ -318,16 +287,13 @@ def _update_rate_limits(self, response): ) def _send_request( - self, - body, - headers, - endpoint_type=EndpointType.ENVELOPE, - envelope=None, - ): - # type: (Self, bytes, Dict[str, str], EndpointType, Optional[Envelope]) -> None - - def record_loss(reason): - # type: (str) -> None + self: Self, + body: bytes, + headers: Dict[str, str], + endpoint_type: EndpointType = EndpointType.ENVELOPE, + envelope: Optional[Envelope] = None, + ) -> None: + def record_loss(reason: str) -> None: if envelope is None: self.record_lost_event(reason, data_category="error") else: @@ -374,12 +340,12 @@ def record_loss(reason): finally: response.close() - def on_dropped_event(self, _reason): - # type: (Self, str) -> None + def on_dropped_event(self: Self, _reason: str) -> None: return None - def _fetch_pending_client_report(self, force=False, interval=60): - # type: (Self, bool, int) -> Optional[Item] + def _fetch_pending_client_report( + self: Self, force: bool = False, interval: int = 60 + ) -> Optional[Item]: if not self.options["send_client_reports"]: return None @@ -409,49 +375,36 @@ def _fetch_pending_client_report(self, force=False, interval=60): type="client_report", ) - def _flush_client_reports(self, force=False): - # type: (Self, bool) -> None + def _flush_client_reports(self: Self, force: bool = False) -> None: client_report = self._fetch_pending_client_report(force=force, interval=60) if client_report is not None: self.capture_envelope(Envelope(items=[client_report])) - def _check_disabled(self, category): - # type: (str) -> bool - def _disabled(bucket): - # type: (Any) -> bool - - # The envelope item type used for metrics is statsd - # whereas the rate limit category is metric_bucket - if bucket == "statsd": - bucket = "metric_bucket" - + def _check_disabled(self: Self, category: EventDataCategory) -> bool: + def _disabled(bucket: Optional[EventDataCategory]) -> bool: ts = self._disabled_until.get(bucket) return ts is not None and ts > datetime.now(timezone.utc) return _disabled(category) or _disabled(None) - def _is_rate_limited(self): - # type: (Self) -> bool + def _is_rate_limited(self: Self) -> bool: return any( ts > datetime.now(timezone.utc) for ts in self._disabled_until.values() ) - def _is_worker_full(self): - # type: (Self) -> bool + def _is_worker_full(self: Self) -> bool: return self._worker.full() - def is_healthy(self): - # type: (Self) -> bool + def 
is_healthy(self: Self) -> bool: return not (self._is_worker_full() or self._is_rate_limited()) - def _send_envelope(self, envelope): - # type: (Self, Envelope) -> None + def _send_envelope(self: Self, envelope: Envelope) -> None: # remove all items from the envelope which are over quota new_items = [] for item in envelope.items: if self._check_disabled(item.data_category): - if item.data_category in ("transaction", "error", "default", "statsd"): + if item.data_category in ("transaction", "error", "default"): self.on_dropped_event("self_rate_limits") self.record_lost_event("ratelimit_backoff", item=item) else: @@ -497,8 +450,9 @@ def _send_envelope(self, envelope): ) return None - def _serialize_envelope(self, envelope): - # type: (Self, Envelope) -> tuple[Optional[str], io.BytesIO] + def _serialize_envelope( + self: Self, envelope: Envelope + ) -> tuple[Optional[str], io.BytesIO]: content_encoding = None body = io.BytesIO() if self._compression_level == 0 or self._compression_algo is None: @@ -519,12 +473,10 @@ def _serialize_envelope(self, envelope): return content_encoding, body - def _get_pool_options(self): - # type: (Self) -> Dict[str, Any] + def _get_pool_options(self: Self) -> Dict[str, Any]: raise NotImplementedError() - def _in_no_proxy(self, parsed_dsn): - # type: (Self, Dsn) -> bool + def _in_no_proxy(self: Self, parsed_dsn: Dsn) -> bool: no_proxy = getproxies().get("no") if not no_proxy: return False @@ -534,26 +486,28 @@ def _in_no_proxy(self, parsed_dsn): return True return False - def _make_pool(self): - # type: (Self) -> Union[PoolManager, ProxyManager, httpcore.SOCKSProxy, httpcore.HTTPProxy, httpcore.ConnectionPool] + def _make_pool( + self: Self, + ) -> Union[ + PoolManager, + ProxyManager, + httpcore.SOCKSProxy, + httpcore.HTTPProxy, + httpcore.ConnectionPool, + ]: raise NotImplementedError() def _request( - self, - method, - endpoint_type, - body, - headers, - ): - # type: (Self, str, EndpointType, Any, Mapping[str, str]) -> Union[urllib3.BaseHTTPResponse, httpcore.Response] + self: Self, + method: str, + endpoint_type: EndpointType, + body: Any, + headers: Mapping[str, str], + ) -> Union[urllib3.BaseHTTPResponse, httpcore.Response]: raise NotImplementedError() - def capture_envelope( - self, envelope # type: Envelope - ): - # type: (...) 
-> None - def send_envelope_wrapper(): - # type: () -> None + def capture_envelope(self: Self, envelope: Envelope) -> None: + def send_envelope_wrapper() -> None: with capture_internal_exceptions(): self._send_envelope(envelope) self._flush_client_reports() @@ -564,53 +518,26 @@ def send_envelope_wrapper(): self.record_lost_event("queue_overflow", item=item) def flush( - self, - timeout, - callback=None, - ): - # type: (Self, float, Optional[Callable[[int, float], None]]) -> None + self: Self, + timeout: float, + callback: Optional[Callable[[int, float], None]] = None, + ) -> None: logger.debug("Flushing HTTP transport") if timeout > 0: self._worker.submit(lambda: self._flush_client_reports(force=True)) self._worker.flush(timeout, callback) - def kill(self): - # type: (Self) -> None + def kill(self: Self) -> None: logger.debug("Killing HTTP transport") self._worker.kill() - @staticmethod - def _warn_hub_cls(): - # type: () -> None - """Convenience method to warn users about the deprecation of the `hub_cls` attribute.""" - warnings.warn( - "The `hub_cls` attribute is deprecated and will be removed in a future release.", - DeprecationWarning, - stacklevel=3, - ) - - @property - def hub_cls(self): - # type: (Self) -> type[sentry_sdk.Hub] - """DEPRECATED: This attribute is deprecated and will be removed in a future release.""" - HttpTransport._warn_hub_cls() - return self._hub_cls - - @hub_cls.setter - def hub_cls(self, value): - # type: (Self, type[sentry_sdk.Hub]) -> None - """DEPRECATED: This attribute is deprecated and will be removed in a future release.""" - HttpTransport._warn_hub_cls() - self._hub_cls = value - class HttpTransport(BaseHttpTransport): if TYPE_CHECKING: _pool: Union[PoolManager, ProxyManager] - def _get_pool_options(self): - # type: (Self) -> Dict[str, Any] + def _get_pool_options(self: Self) -> Dict[str, Any]: num_pools = self.options.get("_experiments", {}).get("transport_num_pools") options = { @@ -619,7 +546,7 @@ def _get_pool_options(self): "timeout": urllib3.Timeout(total=self.TIMEOUT), } - socket_options = None # type: Optional[List[Tuple[int, int, int | bytes]]] + socket_options: Optional[List[Tuple[int, int, int | bytes]]] = None if self.options["socket_options"] is not None: socket_options = self.options["socket_options"] @@ -652,8 +579,7 @@ def _get_pool_options(self): return options - def _make_pool(self): - # type: (Self) -> Union[PoolManager, ProxyManager] + def _make_pool(self: Self) -> Union[PoolManager, ProxyManager]: if self.parsed_dsn is None: raise ValueError("Cannot create HTTP-based transport without valid DSN") @@ -699,13 +625,12 @@ def _make_pool(self): return urllib3.PoolManager(**opts) def _request( - self, - method, - endpoint_type, - body, - headers, - ): - # type: (Self, str, EndpointType, Any, Mapping[str, str]) -> urllib3.BaseHTTPResponse + self: Self, + method: str, + endpoint_type: EndpointType, + body: Any, + headers: Mapping[str, str], + ) -> urllib3.BaseHTTPResponse: return self._pool.request( method, self._auth.get_api_url(endpoint_type), @@ -714,14 +639,10 @@ def _request( ) -try: - import httpcore - import h2 # noqa: F401 -except ImportError: +if not HTTP2_ENABLED: # Sorry, no Http2Transport for you class Http2Transport(HttpTransport): - def __init__(self, options): - # type: (Self, Dict[str, Any]) -> None + def __init__(self: Self, options: Dict[str, Any]) -> None: super().__init__(options) logger.warning( "You tried to use HTTP2Transport but don't have httpcore[http2] installed. Falling back to HTTPTransport." 
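# A minimal opt-in sketch (hypothetical usage; the DSN below is a placeholder):
# the HTTP/2 transport is selected through the experimental `transport_http2`
# flag read in `make_transport` further down. If httpcore[http2] is not
# installed, the fallback class above logs the warning and behaves exactly
# like the plain `HttpTransport`.
#
#     import sentry_sdk
#
#     sentry_sdk.init(
#         dsn="https://examplePublicKey@o0.ingest.sentry.io/0",
#         _experiments={"transport_http2": True},
#     )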
@@ -739,8 +660,7 @@ class Http2Transport(BaseHttpTransport): # type: ignore httpcore.SOCKSProxy, httpcore.HTTPProxy, httpcore.ConnectionPool ] - def _get_header_value(self, response, header): - # type: (Self, httpcore.Response, str) -> Optional[str] + def _get_header_value(self: Self, response: Any, header: str) -> Optional[str]: return next( ( val.decode("ascii") @@ -751,13 +671,12 @@ def _get_header_value(self, response, header): ) def _request( - self, - method, - endpoint_type, - body, - headers, - ): - # type: (Self, str, EndpointType, Any, Mapping[str, str]) -> httpcore.Response + self: Self, + method: str, + endpoint_type: EndpointType, + body: Any, + headers: Mapping[str, str], + ) -> httpcore.Response: response = self._pool.request( method, self._auth.get_api_url(endpoint_type), @@ -774,13 +693,12 @@ def _request( ) return response - def _get_pool_options(self): - # type: (Self) -> Dict[str, Any] - options = { + def _get_pool_options(self: Self) -> Dict[str, Any]: + options: Dict[str, Any] = { "http2": self.parsed_dsn is not None and self.parsed_dsn.scheme == "https", "retries": 3, - } # type: Dict[str, Any] + } socket_options = ( self.options["socket_options"] @@ -811,8 +729,9 @@ def _get_pool_options(self): return options - def _make_pool(self): - # type: (Self) -> Union[httpcore.SOCKSProxy, httpcore.HTTPProxy, httpcore.ConnectionPool] + def _make_pool( + self: Self, + ) -> Union[httpcore.SOCKSProxy, httpcore.HTTPProxy, httpcore.ConnectionPool]: if self.parsed_dsn is None: raise ValueError("Cannot create HTTP-based transport without valid DSN") proxy = None @@ -855,58 +774,20 @@ def _make_pool(self): return httpcore.ConnectionPool(**opts) -class _FunctionTransport(Transport): - """ - DEPRECATED: Users wishing to provide a custom transport should subclass - the Transport class, rather than providing a function. - """ - - def __init__( - self, func # type: Callable[[Event], None] - ): - # type: (...) -> None - Transport.__init__(self) - self._func = func - - def capture_event( - self, event # type: Event - ): - # type: (...) -> None - self._func(event) - return None - - def capture_envelope(self, envelope: Envelope) -> None: - # Since function transports expect to be called with an event, we need - # to iterate over the envelope and call the function for each event, via - # the deprecated capture_event method. - event = envelope.get_event() - if event is not None: - self.capture_event(event) - - -def make_transport(options): - # type: (Dict[str, Any]) -> Optional[Transport] +def make_transport(options: Dict[str, Any]) -> Optional[Transport]: ref_transport = options["transport"] use_http2_transport = options.get("_experiments", {}).get("transport_http2", False) # By default, we use the http transport class - transport_cls = ( + transport_cls: Type[Transport] = ( Http2Transport if use_http2_transport else HttpTransport - ) # type: Type[Transport] + ) if isinstance(ref_transport, Transport): return ref_transport elif isinstance(ref_transport, type) and issubclass(ref_transport, Transport): transport_cls = ref_transport - elif callable(ref_transport): - warnings.warn( - "Function transports are deprecated and will be removed in a future release." 
- "Please provide a Transport instance or subclass, instead.", - DeprecationWarning, - stacklevel=2, - ) - return _FunctionTransport(ref_transport) # if a transport class is given only instantiate it if the dsn is not # empty or None diff --git a/sentry_sdk/utils.py b/sentry_sdk/utils.py index b0f3fa4a4c..e9270f37bd 100644 --- a/sentry_sdk/utils.py +++ b/sentry_sdk/utils.py @@ -1,3 +1,4 @@ +from __future__ import annotations import base64 import json import linecache @@ -25,30 +26,28 @@ BaseExceptionGroup = None # type: ignore import sentry_sdk -from sentry_sdk._compat import PY37 from sentry_sdk.consts import ( DEFAULT_ADD_FULL_STACK, DEFAULT_MAX_STACK_FRAMES, DEFAULT_MAX_VALUE_LENGTH, + SPANDATA, EndpointType, ) from sentry_sdk._types import Annotated, AnnotatedValue, SENSITIVE_DATA_SUBSTITUTE -from typing import TYPE_CHECKING +from typing import TYPE_CHECKING, overload if TYPE_CHECKING: from types import FrameType, TracebackType from typing import ( Any, Callable, - cast, ContextManager, Dict, Iterator, List, NoReturn, Optional, - overload, ParamSpec, Set, Tuple, @@ -57,9 +56,10 @@ Union, ) - from gevent.hub import Hub + from gevent.hub import Hub as GeventHub + from opentelemetry.util.types import AttributeValue - from sentry_sdk._types import Event, ExcInfo, Log, Hint + from sentry_sdk._types import Event, ExcInfo P = ParamSpec("P") R = TypeVar("R") @@ -86,9 +86,15 @@ be affected by this limit if they have a custom recursion limit. """ +MAX_EXCEPTIONS = 25 +"""Maximum number of exceptions in a chain or group to send to Sentry. -def env_to_bool(value, *, strict=False): - # type: (Any, Optional[bool]) -> bool | None +This is a sanity limit to avoid ending in an infinite loop of exceptions when the same exception is in the root and a leave +of the exception tree. +""" + + +def env_to_bool(value: Any, *, strict: Optional[bool] = False) -> Optional[bool]: """Casts an ENV variable value to boolean using the constants defined above. In strict mode, it may return None if the value doesn't match any of the predefined values. 
""" @@ -103,14 +109,12 @@ def env_to_bool(value, *, strict=False): return None if strict else bool(value) -def json_dumps(data): - # type: (Any) -> bytes +def json_dumps(data: Any) -> bytes: """Serialize data into a compact JSON representation encoded as UTF-8.""" return json.dumps(data, allow_nan=False, separators=(",", ":")).encode("utf-8") -def get_git_revision(): - # type: () -> Optional[str] +def get_git_revision() -> Optional[str]: try: with open(os.path.devnull, "w+") as null: # prevent command prompt windows from popping up on windows @@ -137,8 +141,7 @@ def get_git_revision(): return revision -def get_default_release(): - # type: () -> Optional[str] +def get_default_release() -> Optional[str]: """Try to guess a default release.""" release = os.environ.get("SENTRY_RELEASE") if release: @@ -161,8 +164,7 @@ def get_default_release(): return None -def get_sdk_name(installed_integrations): - # type: (List[str]) -> str +def get_sdk_name(installed_integrations: List[str]) -> str: """Return the SDK name including the name of the used web framework.""" # Note: I can not use for example sentry_sdk.integrations.django.DjangoIntegration.identifier @@ -200,12 +202,15 @@ def get_sdk_name(installed_integrations): class CaptureInternalException: __slots__ = () - def __enter__(self): - # type: () -> ContextManager[Any] + def __enter__(self) -> ContextManager[Any]: return self - def __exit__(self, ty, value, tb): - # type: (Optional[Type[BaseException]], Optional[BaseException], Optional[TracebackType]) -> bool + def __exit__( + self, + ty: Optional[Type[BaseException]], + value: Optional[BaseException], + tb: Optional[TracebackType], + ) -> bool: if ty is not None and value is not None: capture_internal_exception((ty, value, tb)) @@ -215,13 +220,11 @@ def __exit__(self, ty, value, tb): _CAPTURE_INTERNAL_EXCEPTION = CaptureInternalException() -def capture_internal_exceptions(): - # type: () -> ContextManager[Any] +def capture_internal_exceptions() -> ContextManager[Any]: return _CAPTURE_INTERNAL_EXCEPTION -def capture_internal_exception(exc_info): - # type: (ExcInfo) -> None +def capture_internal_exception(exc_info: ExcInfo) -> None: """ Capture an exception that is likely caused by a bug in the SDK itself. @@ -232,13 +235,11 @@ def capture_internal_exception(exc_info): logger.error("Internal error in sentry_sdk", exc_info=exc_info) -def to_timestamp(value): - # type: (datetime) -> float +def to_timestamp(value: datetime) -> float: return (value - epoch).total_seconds() -def format_timestamp(value): - # type: (datetime) -> str +def format_timestamp(value: datetime) -> str: """Formats a timestamp in RFC 3339 format. Any datetime objects with a non-UTC timezone are converted to UTC, so that all timestamps are formatted in UTC. @@ -250,33 +251,9 @@ def format_timestamp(value): return utctime.strftime("%Y-%m-%dT%H:%M:%S.%fZ") -ISO_TZ_SEPARATORS = frozenset(("+", "-")) - - -def datetime_from_isoformat(value): - # type: (str) -> datetime - try: - result = datetime.fromisoformat(value) - except (AttributeError, ValueError): - # py 3.6 - timestamp_format = ( - "%Y-%m-%dT%H:%M:%S.%f" if "." 
in value else "%Y-%m-%dT%H:%M:%S" - ) - if value.endswith("Z"): - value = value[:-1] + "+0000" - - if value[-6] in ISO_TZ_SEPARATORS: - timestamp_format += "%z" - value = value[:-3] + value[-2:] - elif value[-5] in ISO_TZ_SEPARATORS: - timestamp_format += "%z" - - result = datetime.strptime(value, timestamp_format) - return result.astimezone(timezone.utc) - - -def event_hint_with_exc_info(exc_info=None): - # type: (Optional[ExcInfo]) -> Dict[str, Optional[ExcInfo]] +def event_hint_with_exc_info( + exc_info: Optional[ExcInfo] = None, +) -> Dict[str, Optional[ExcInfo]]: """Creates a hint with the exc info filled in.""" if exc_info is None: exc_info = sys.exc_info() @@ -294,8 +271,7 @@ class BadDsn(ValueError): class Dsn: """Represents a DSN.""" - def __init__(self, value): - # type: (Union[Dsn, str]) -> None + def __init__(self, value: Union[Dsn, str]) -> None: if isinstance(value, Dsn): self.__dict__ = dict(value.__dict__) return @@ -311,7 +287,7 @@ def __init__(self, value): self.host = parts.hostname if parts.port is None: - self.port = self.scheme == "https" and 443 or 80 # type: int + self.port: int = self.scheme == "https" and 443 or 80 else: self.port = parts.port @@ -331,16 +307,14 @@ def __init__(self, value): self.path = "/".join(path) + "/" @property - def netloc(self): - # type: () -> str + def netloc(self) -> str: """The netloc part of a DSN.""" rv = self.host if (self.scheme, self.port) not in (("http", 80), ("https", 443)): rv = "%s:%s" % (rv, self.port) return rv - def to_auth(self, client=None): - # type: (Optional[Any]) -> Auth + def to_auth(self, client: Optional[Any] = None) -> Auth: """Returns the auth info object for this dsn.""" return Auth( scheme=self.scheme, @@ -352,8 +326,7 @@ def to_auth(self, client=None): client=client, ) - def __str__(self): - # type: () -> str + def __str__(self) -> str: return "%s://%s%s@%s%s%s" % ( self.scheme, self.public_key, @@ -369,16 +342,15 @@ class Auth: def __init__( self, - scheme, - host, - project_id, - public_key, - secret_key=None, - version=7, - client=None, - path="/", - ): - # type: (str, str, str, str, Optional[str], int, Optional[Any], str) -> None + scheme: str, + host: str, + project_id: str, + public_key: str, + secret_key: Optional[str] = None, + version: int = 7, + client: Optional[Any] = None, + path: str = "/", + ) -> None: self.scheme = scheme self.host = host self.path = path @@ -388,10 +360,7 @@ def __init__( self.version = version self.client = client - def get_api_url( - self, type=EndpointType.ENVELOPE # type: EndpointType - ): - # type: (...) 
-> str + def get_api_url(self, type: EndpointType = EndpointType.ENVELOPE) -> str: """Returns the API url for storing events.""" return "%s://%s%sapi/%s/%s/" % ( self.scheme, @@ -401,8 +370,7 @@ def get_api_url( type.value, ) - def to_header(self): - # type: () -> str + def to_header(self) -> str: """Returns the auth header a string.""" rv = [("sentry_key", self.public_key), ("sentry_version", self.version)] if self.client is not None: @@ -412,21 +380,18 @@ def to_header(self): return "Sentry " + ", ".join("%s=%s" % (key, value) for key, value in rv) -def get_type_name(cls): - # type: (Optional[type]) -> Optional[str] +def get_type_name(cls: Optional[type]) -> Optional[str]: return getattr(cls, "__qualname__", None) or getattr(cls, "__name__", None) -def get_type_module(cls): - # type: (Optional[type]) -> Optional[str] +def get_type_module(cls: Optional[type]) -> Optional[str]: mod = getattr(cls, "__module__", None) if mod not in (None, "builtins", "__builtins__"): return mod return None -def should_hide_frame(frame): - # type: (FrameType) -> bool +def should_hide_frame(frame: FrameType) -> bool: try: mod = frame.f_globals["__name__"] if mod.startswith("sentry_sdk."): @@ -444,9 +409,8 @@ def should_hide_frame(frame): return False -def iter_stacks(tb): - # type: (Optional[TracebackType]) -> Iterator[TracebackType] - tb_ = tb # type: Optional[TracebackType] +def iter_stacks(tb: Optional[TracebackType]) -> Iterator[TracebackType]: + tb_: Optional[TracebackType] = tb while tb_ is not None: if not should_hide_frame(tb_.tb_frame): yield tb_ @@ -454,18 +418,17 @@ def iter_stacks(tb): def get_lines_from_file( - filename, # type: str - lineno, # type: int - max_length=None, # type: Optional[int] - loader=None, # type: Optional[Any] - module=None, # type: Optional[str] -): - # type: (...) -> Tuple[List[Annotated[str]], Optional[Annotated[str]], List[Annotated[str]]] + filename: str, + lineno: int, + max_length: Optional[int] = None, + loader: Optional[Any] = None, + module: Optional[str] = None, +) -> Tuple[List[Annotated[str]], Optional[Annotated[str]], List[Annotated[str]]]: context_lines = 5 source = None if loader is not None and hasattr(loader, "get_source"): try: - source_str = loader.get_source(module) # type: Optional[str] + source_str: Optional[str] = loader.get_source(module) except (ImportError, IOError): source_str = None if source_str is not None: @@ -500,13 +463,12 @@ def get_lines_from_file( def get_source_context( - frame, # type: FrameType - tb_lineno, # type: Optional[int] - max_value_length=None, # type: Optional[int] -): - # type: (...) 
-> Tuple[List[Annotated[str]], Optional[Annotated[str]], List[Annotated[str]]] + frame: FrameType, + tb_lineno: Optional[int], + max_value_length: Optional[int] = None, +) -> Tuple[List[Annotated[str]], Optional[Annotated[str]], List[Annotated[str]]]: try: - abs_path = frame.f_code.co_filename # type: Optional[str] + abs_path: Optional[str] = frame.f_code.co_filename except Exception: abs_path = None try: @@ -527,24 +489,23 @@ return [], None, [] -def safe_str(value): - # type: (Any) -> str +def safe_str(value: Any) -> str: try: return str(value) except Exception: return safe_repr(value) -def safe_repr(value): - # type: (Any) -> str +def safe_repr(value: Any) -> str: try: return repr(value) except Exception: return "<broken repr>" -def filename_for_module(module, abs_path): - # type: (Optional[str], Optional[str]) -> Optional[str] +def filename_for_module( + module: Optional[str], abs_path: Optional[str] +) -> Optional[str]: if not abs_path or not module: return abs_path @@ -568,14 +529,13 @@ def serialize_frame( - frame, - tb_lineno=None, - include_local_variables=True, - include_source_context=True, - max_value_length=None, - custom_repr=None, -): - # type: (FrameType, Optional[int], bool, bool, Optional[int], Optional[Callable[..., Optional[str]]]) -> Dict[str, Any] + frame: FrameType, + tb_lineno: Optional[int] = None, + include_local_variables: bool = True, + include_source_context: bool = True, + max_value_length: Optional[int] = None, + custom_repr: Optional[Callable[..., Optional[str]]] = None, +) -> Dict[str, Any]: f_code = getattr(frame, "f_code", None) if not f_code: abs_path = None @@ -596,13 +556,13 @@ def serialize_frame( except Exception: os_abs_path = None - rv = { + rv: Dict[str, Any] = { "filename": filename_for_module(module, abs_path) or None, "abs_path": os_abs_path, "function": function or "", "module": module, "lineno": tb_lineno, - } # type: Dict[str, Any] + } if include_source_context: rv["pre_context"], rv["context_line"], rv["post_context"] = get_source_context( @@ -620,15 +580,14 @@ def serialize_frame( def current_stacktrace( - include_local_variables=True, # type: bool - include_source_context=True, # type: bool - max_value_length=None, # type: Optional[int] -): - # type: (...) -> Dict[str, Any] + include_local_variables: bool = True, + include_source_context: bool = True, + max_value_length: Optional[int] = None, +) -> Dict[str, Any]: __tracebackhide__ = True frames = [] - f = sys._getframe() # type: Optional[FrameType] + f: Optional[FrameType] = sys._getframe() while f is not None: if not should_hide_frame(f): frames.append( @@ -646,24 +605,22 @@ return {"frames": frames} -def get_errno(exc_value): - # type: (BaseException) -> Optional[Any] +def get_errno(exc_value: BaseException) -> Optional[Any]: return getattr(exc_value, "errno", None) -def get_error_message(exc_value): - # type: (Optional[BaseException]) -> str - message = ( +def get_error_message(exc_value: Optional[BaseException]) -> str: + message: str = ( getattr(exc_value, "message", "") or getattr(exc_value, "detail", "") or safe_str(exc_value) - ) # type: str + ) # __notes__ should be a list of strings when notes are added # via add_note, but can be anything else if __notes__ is set # directly. We only support strings in __notes__, since that # is the correct use.
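The `__notes__` handling above is plain Python semantics and can be sanity-checked in isolation. A minimal sketch (assuming Python 3.11+ for `BaseException.add_note`; the variable names are illustrative):

```python
# Notes added via add_note() accumulate in __notes__; the message-building
# logic appends the string entries to the message, one per line, and skips
# anything that is not a string.
try:
    err = ValueError("bad input")
    err.add_note("while parsing config")  # Python 3.11+
    raise err
except ValueError as exc:
    message = str(exc)
    notes = getattr(exc, "__notes__", None)
    if isinstance(notes, list) and len(notes) > 0:
        message += "\n" + "\n".join(note for note in notes if isinstance(note, str))
    print(message)  # "bad input" followed by "while parsing config"
```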
- notes = getattr(exc_value, "__notes__", None) # type: object + notes: object = getattr(exc_value, "__notes__", None) if isinstance(notes, list) and len(notes) > 0: message += "\n" + "\n".join(note for note in notes if isinstance(note, str)) @@ -671,24 +628,23 @@ def get_error_message(exc_value): def single_exception_from_error_tuple( - exc_type, # type: Optional[type] - exc_value, # type: Optional[BaseException] - tb, # type: Optional[TracebackType] - client_options=None, # type: Optional[Dict[str, Any]] - mechanism=None, # type: Optional[Dict[str, Any]] - exception_id=None, # type: Optional[int] - parent_id=None, # type: Optional[int] - source=None, # type: Optional[str] - full_stack=None, # type: Optional[list[dict[str, Any]]] -): - # type: (...) -> Dict[str, Any] + exc_type: Optional[type], + exc_value: Optional[BaseException], + tb: Optional[TracebackType], + client_options: Optional[Dict[str, Any]] = None, + mechanism: Optional[Dict[str, Any]] = None, + exception_id: Optional[int] = None, + parent_id: Optional[int] = None, + source: Optional[str] = None, + full_stack: Optional[list[dict[str, Any]]] = None, +) -> Dict[str, Any]: """ Creates a dict that goes into the events `exception.values` list and is ingestible by Sentry. See the Exception Interface documentation for more details: https://develop.sentry.dev/sdk/event-payloads/exception/ """ - exception_value = {} # type: Dict[str, Any] + exception_value: Dict[str, Any] = {} exception_value["mechanism"] = ( mechanism.copy() if mechanism else {"type": "generic", "handled": True} ) @@ -737,7 +693,7 @@ def single_exception_from_error_tuple( max_value_length = client_options["max_value_length"] custom_repr = client_options.get("custom_repr") - frames = [ + frames: List[Dict[str, Any]] = [ serialize_frame( tb.tb_frame, tb_lineno=tb.tb_lineno, @@ -749,7 +705,7 @@ def single_exception_from_error_tuple( # Process at most MAX_STACK_FRAMES + 1 frames, to avoid hanging on # processing a super-long stacktrace. for tb, _ in zip(iter_stacks(tb), range(MAX_STACK_FRAMES + 1)) - ] # type: List[Dict[str, Any]] + ] if len(frames) > MAX_STACK_FRAMES: # If we have more frames than the limit, we remove the stacktrace completely. @@ -777,12 +733,11 @@ def single_exception_from_error_tuple( if HAS_CHAINED_EXCEPTIONS: - def walk_exception_chain(exc_info): - # type: (ExcInfo) -> Iterator[ExcInfo] + def walk_exception_chain(exc_info: ExcInfo) -> Iterator[ExcInfo]: exc_type, exc_value, tb = exc_info seen_exceptions = [] - seen_exception_ids = set() # type: Set[int] + seen_exception_ids: Set[int] = set() while ( exc_type is not None @@ -809,32 +764,33 @@ def walk_exception_chain(exc_info): else: - def walk_exception_chain(exc_info): - # type: (ExcInfo) -> Iterator[ExcInfo] + def walk_exception_chain(exc_info: ExcInfo) -> Iterator[ExcInfo]: yield exc_info def exceptions_from_error( - exc_type, # type: Optional[type] - exc_value, # type: Optional[BaseException] - tb, # type: Optional[TracebackType] - client_options=None, # type: Optional[Dict[str, Any]] - mechanism=None, # type: Optional[Dict[str, Any]] - exception_id=0, # type: int - parent_id=0, # type: int - source=None, # type: Optional[str] - full_stack=None, # type: Optional[list[dict[str, Any]]] -): - # type: (...) 
-> Tuple[int, List[Dict[str, Any]]] + exc_type: Optional[type], + exc_value: Optional[BaseException], + tb: Optional[TracebackType], + client_options: Optional[Dict[str, Any]] = None, + mechanism: Optional[Dict[str, Any]] = None, + exception_id: int = 0, + parent_id: int = 0, + source: Optional[str] = None, + full_stack: Optional[list[dict[str, Any]]] = None, +) -> Tuple[int, List[Dict[str, Any]]]: """ - Creates the list of exceptions. - This can include chained exceptions and exceptions from an ExceptionGroup. - - See the Exception Interface documentation for more details: - https://develop.sentry.dev/sdk/event-payloads/exception/ + Converts the given exception information into the Sentry structured "exception" format. + This will return a list of exceptions (a flattened tree of exceptions) in the + format of the Exception Interface documentation: + https://develop.sentry.dev/sdk/data-model/event-payloads/exception/ + + This function can handle: + - simple exceptions + - chained exceptions (raise .. from ..) + - exception groups """ - - parent = single_exception_from_error_tuple( + base_exception = single_exception_from_error_tuple( exc_type=exc_type, exc_value=exc_value, tb=tb, @@ -845,64 +801,81 @@ source=source, full_stack=full_stack, ) - exceptions = [parent] + exceptions = [base_exception] parent_id = exception_id exception_id += 1 - should_supress_context = hasattr(exc_value, "__suppress_context__") and exc_value.__suppress_context__ # type: ignore - if should_supress_context: - # Add direct cause. - # The field `__cause__` is set when raised with the exception (using the `from` keyword). - exception_has_cause = ( + if exception_id > MAX_EXCEPTIONS - 1: + return (exception_id, exceptions) + + causing_exception = None + exception_source = None + + # Add any causing exceptions, if present. + should_suppress_context = ( + hasattr(exc_value, "__suppress_context__") and exc_value.__suppress_context__ # type: ignore[union-attr] + ) + # Note: __suppress_context__ is True if the exception is raised with the `from` keyword. + if should_suppress_context: + # Explicitly chained exceptions (e.g. raise NewException() from OriginalException()) + # The field `__cause__` is set to OriginalException + has_explicit_causing_exception = ( exc_value and hasattr(exc_value, "__cause__") and exc_value.__cause__ is not None ) - if exception_has_cause: - cause = exc_value.__cause__ # type: ignore - (exception_id, child_exceptions) = exceptions_from_error( - exc_type=type(cause), - exc_value=cause, - tb=getattr(cause, "__traceback__", None), - client_options=client_options, - mechanism=mechanism, - exception_id=exception_id, - source="__cause__", - full_stack=full_stack, - ) - exceptions.extend(child_exceptions) - + if has_explicit_causing_exception: + exception_source = "__cause__" + causing_exception = exc_value.__cause__ # type: ignore else: - # Add indirect cause. - # The field `__context__` is assigned if another exception occurs while handling the exception. - exception_has_content = ( + # Implicitly chained exceptions (when an exception occurs while handling another exception) + # The field `__context__` of the newly raised exception is set + # to the exception that was being handled.
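The distinction drawn in these comments is standard Python behavior and easy to verify with the interpreter alone; a small sketch of which dunder gets set in each case:

```python
# Explicit chaining: `raise ... from ...` sets __cause__ and flips
# __suppress_context__ to True on the newly raised exception.
try:
    try:
        1 / 0
    except ZeroDivisionError as original:
        raise RuntimeError("explicit") from original
except RuntimeError as exc:
    assert exc.__cause__ is not None
    assert exc.__suppress_context__ is True

# Implicit chaining: raising while handling another exception only sets
# __context__; __cause__ stays None and __suppress_context__ stays False.
try:
    try:
        1 / 0
    except ZeroDivisionError:
        raise RuntimeError("implicit")
except RuntimeError as exc:
    assert exc.__cause__ is None
    assert exc.__context__ is not None
    assert exc.__suppress_context__ is False
```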
+ has_implicit_causing_exception = ( exc_value and hasattr(exc_value, "__context__") and exc_value.__context__ is not None ) - if exception_has_content: - context = exc_value.__context__ # type: ignore - (exception_id, child_exceptions) = exceptions_from_error( - exc_type=type(context), - exc_value=context, - tb=getattr(context, "__traceback__", None), - client_options=client_options, - mechanism=mechanism, - exception_id=exception_id, - source="__context__", - full_stack=full_stack, - ) - exceptions.extend(child_exceptions) + if has_implicit_causing_exception: + exception_source = "__context__" + causing_exception = exc_value.__context__ # type: ignore + + if causing_exception: + # Some frameworks (e.g. FastAPI) wrap the causing exception in an + # ExceptionGroup that only contains one exception: the causing exception. + # This would lead to an infinite loop, so we skip the causing exception + # in this case, because it is the same as the base_exception above. + if ( + BaseExceptionGroup is not None + and isinstance(causing_exception, BaseExceptionGroup) + and len(causing_exception.exceptions) == 1 + and causing_exception.exceptions[0] == exc_value + ): + causing_exception = None + + if causing_exception: + (exception_id, child_exceptions) = exceptions_from_error( + exc_type=type(causing_exception), + exc_value=causing_exception, + tb=getattr(causing_exception, "__traceback__", None), + client_options=client_options, + mechanism=mechanism, + exception_id=exception_id, + parent_id=parent_id, + source=exception_source, + full_stack=full_stack, + ) + exceptions.extend(child_exceptions) - # Add exceptions from an ExceptionGroup. + # Add child exceptions from an ExceptionGroup. is_exception_group = exc_value and hasattr(exc_value, "exceptions") if is_exception_group: - for idx, e in enumerate(exc_value.exceptions): # type: ignore + for idx, causing_exception in enumerate(exc_value.exceptions): # type: ignore (exception_id, child_exceptions) = exceptions_from_error( - exc_type=type(e), - exc_value=e, - tb=getattr(e, "__traceback__", None), + exc_type=type(causing_exception), + exc_value=causing_exception, + tb=getattr(causing_exception, "__traceback__", None), client_options=client_options, mechanism=mechanism, exception_id=exception_id, @@ -916,59 +889,47 @@ def exceptions_from_error( def exceptions_from_error_tuple( - exc_info, # type: ExcInfo - client_options=None, # type: Optional[Dict[str, Any]] - mechanism=None, # type: Optional[Dict[str, Any]] - full_stack=None, # type: Optional[list[dict[str, Any]]] -): - # type: (...) -> List[Dict[str, Any]] + exc_info: ExcInfo, + client_options: Optional[Dict[str, Any]] = None, + mechanism: Optional[Dict[str, Any]] = None, + full_stack: Optional[list[dict[str, Any]]] = None, +) -> List[Dict[str, Any]]: + """ + Convert Python's exception information into Sentry's structured "exception" format in the event. + See https://develop.sentry.dev/sdk/data-model/event-payloads/exception/ + This is the entry point for exception handling.
+ """ + # unpack the exception info tuple exc_type, exc_value, tb = exc_info - is_exception_group = BaseExceptionGroup is not None and isinstance( - exc_value, BaseExceptionGroup + # let exceptions_from_error do the actual work + _, exceptions = exceptions_from_error( + exc_type=exc_type, + exc_value=exc_value, + tb=tb, + client_options=client_options, + mechanism=mechanism, + exception_id=0, + parent_id=0, + full_stack=full_stack, ) - if is_exception_group: - (_, exceptions) = exceptions_from_error( - exc_type=exc_type, - exc_value=exc_value, - tb=tb, - client_options=client_options, - mechanism=mechanism, - exception_id=0, - parent_id=0, - full_stack=full_stack, - ) - - else: - exceptions = [] - for exc_type, exc_value, tb in walk_exception_chain(exc_info): - exceptions.append( - single_exception_from_error_tuple( - exc_type=exc_type, - exc_value=exc_value, - tb=tb, - client_options=client_options, - mechanism=mechanism, - full_stack=full_stack, - ) - ) - + # make sure the exceptions are sorted + # from the innermost (oldest) + # to the outermost (newest) exception exceptions.reverse() return exceptions -def to_string(value): - # type: (str) -> str +def to_string(value: Any) -> str: try: return str(value) except UnicodeDecodeError: return repr(value)[1:-1] -def iter_event_stacktraces(event): - # type: (Event) -> Iterator[Annotated[Dict[str, Any]]] +def iter_event_stacktraces(event: Event) -> Iterator[Annotated[Dict[str, Any]]]: if "stacktrace" in event: yield event["stacktrace"] if "threads" in event: @@ -981,8 +942,7 @@ def iter_event_stacktraces(event): yield exception["stacktrace"] -def iter_event_frames(event): - # type: (Event) -> Iterator[Dict[str, Any]] +def iter_event_frames(event: Event) -> Iterator[Dict[str, Any]]: for stacktrace in iter_event_stacktraces(event): if isinstance(stacktrace, AnnotatedValue): stacktrace = stacktrace.value or {} @@ -991,8 +951,12 @@ def iter_event_frames(event): yield frame -def handle_in_app(event, in_app_exclude=None, in_app_include=None, project_root=None): - # type: (Event, Optional[List[str]], Optional[List[str]], Optional[str]) -> Event +def handle_in_app( + event: Event, + in_app_exclude: Optional[List[str]] = None, + in_app_include: Optional[List[str]] = None, + project_root: Optional[str] = None, +) -> Event: for stacktrace in iter_event_stacktraces(event): if isinstance(stacktrace, AnnotatedValue): stacktrace = stacktrace.value or {} @@ -1007,8 +971,12 @@ def handle_in_app(event, in_app_exclude=None, in_app_include=None, project_root= return event -def set_in_app_in_frames(frames, in_app_exclude, in_app_include, project_root=None): - # type: (Any, Optional[List[str]], Optional[List[str]], Optional[str]) -> Optional[Any] +def set_in_app_in_frames( + frames: Any, + in_app_exclude: Optional[List[str]], + in_app_include: Optional[List[str]], + project_root: Optional[str] = None, +) -> Optional[Any]: if not frames: return None @@ -1046,8 +1014,7 @@ def set_in_app_in_frames(frames, in_app_exclude, in_app_include, project_root=No return frames -def exc_info_from_error(error): - # type: (Union[BaseException, ExcInfo]) -> ExcInfo +def exc_info_from_error(error: Union[BaseException, ExcInfo]) -> ExcInfo: if isinstance(error, tuple) and len(error) == 3: exc_type, exc_value, tb = error elif isinstance(error, BaseException): @@ -1065,18 +1032,17 @@ def exc_info_from_error(error): else: raise ValueError("Expected Exception object to report, got %s!" 
% type(error)) - exc_info = (exc_type, exc_value, tb) - - if TYPE_CHECKING: - # This cast is safe because exc_type and exc_value are either both - # None or both not None. - exc_info = cast(ExcInfo, exc_info) - - return exc_info + if exc_type is not None and exc_value is not None: + return (exc_type, exc_value, tb) + else: + return (None, None, None) -def merge_stack_frames(frames, full_stack, client_options): - # type: (List[Dict[str, Any]], List[Dict[str, Any]], Optional[Dict[str, Any]]) -> List[Dict[str, Any]] +def merge_stack_frames( + frames: List[Dict[str, Any]], + full_stack: List[Dict[str, Any]], + client_options: Optional[Dict[str, Any]], +) -> List[Dict[str, Any]]: """ Add the missing frames from full_stack to frames and return the merged list. """ @@ -1116,11 +1082,10 @@ def merge_stack_frames(frames, full_stack, client_options): def event_from_exception( - exc_info, # type: Union[BaseException, ExcInfo] - client_options=None, # type: Optional[Dict[str, Any]] - mechanism=None, # type: Optional[Dict[str, Any]] -): - # type: (...) -> Tuple[Event, Dict[str, Any]] + exc_info: Union[BaseException, ExcInfo], + client_options: Optional[Dict[str, Any]] = None, + mechanism: Optional[Dict[str, Any]] = None, +) -> Tuple[Event, Dict[str, Any]]: exc_info = exc_info_from_error(exc_info) hint = event_hint_with_exc_info(exc_info) @@ -1145,8 +1110,7 @@ def event_from_exception( ) -def _module_in_list(name, items): - # type: (Optional[str], Optional[List[str]]) -> bool +def _module_in_list(name: Optional[str], items: Optional[List[str]]) -> bool: if name is None: return False @@ -1160,8 +1124,7 @@ def _module_in_list(name, items): return False -def _is_external_source(abs_path): - # type: (Optional[str]) -> bool +def _is_external_source(abs_path: Optional[str]) -> bool: # check if frame is in 'site-packages' or 'dist-packages' if abs_path is None: return False @@ -1172,8 +1135,7 @@ def _is_external_source(abs_path): return external_source -def _is_in_project_root(abs_path, project_root): - # type: (Optional[str], Optional[str]) -> bool +def _is_in_project_root(abs_path: Optional[str], project_root: Optional[str]) -> bool: if abs_path is None or project_root is None: return False @@ -1184,8 +1146,7 @@ def _is_in_project_root(abs_path, project_root): return False -def _truncate_by_bytes(string, max_bytes): - # type: (str, int) -> str +def _truncate_by_bytes(string: str, max_bytes: int) -> str: """ Truncate a UTF-8-encodable string to the last full codepoint so that it fits in max_bytes. """ @@ -1194,16 +1155,16 @@ def _truncate_by_bytes(string, max_bytes): return truncated + "..." -def _get_size_in_bytes(value): - # type: (str) -> Optional[int] +def _get_size_in_bytes(value: str) -> Optional[int]: try: return len(value.encode("utf-8")) except (UnicodeEncodeError, UnicodeDecodeError): return None -def strip_string(value, max_length=None): - # type: (str, Optional[int]) -> Union[AnnotatedValue, str] +def strip_string( + value: str, max_length: Optional[int] = None +) -> Union[AnnotatedValue, str]: if not value: return value @@ -1231,8 +1192,7 @@ def strip_string(value, max_length=None): ) -def parse_version(version): - # type: (str) -> Optional[Tuple[int, ...]] +def parse_version(version: str) -> Optional[Tuple[int, ...]]: """ Parses a version string into a tuple of integers. 
This uses the parsing logic from PEP 440: @@ -1276,17 +1236,16 @@ try: release = pattern.match(version).groupdict()["release"] # type: ignore - release_tuple = tuple(map(int, release.split(".")[:3])) # type: Tuple[int, ...] + release_tuple: Tuple[int, ...] = tuple(map(int, release.split(".")[:3])) except (TypeError, ValueError, AttributeError): return None return release_tuple -def _is_contextvars_broken(): - # type: () -> bool +def _is_contextvars_broken() -> bool: """ - Returns whether gevent/eventlet have patched the stdlib in a way where thread locals are now more "correct" than contextvars. + Returns whether gevent has patched the stdlib in a way where thread locals are now more "correct" than contextvars. """ try: import gevent @@ -1315,52 +1274,30 @@ def _is_contextvars_broken(): except ImportError: pass - try: - import greenlet - from eventlet.patcher import is_monkey_patched # type: ignore - - greenlet_version = parse_version(greenlet.__version__) - - if greenlet_version is None: - logger.error( - "Internal error in Sentry SDK: Could not parse Greenlet version from greenlet.__version__." - ) - return False - - if is_monkey_patched("thread") and greenlet_version < (0, 5): - return True - except ImportError: - pass - return False -def _make_threadlocal_contextvars(local): - # type: (type) -> type +def _make_threadlocal_contextvars(local: type) -> type: class ContextVar: # Super-limited impl of ContextVar - def __init__(self, name, default=None): - # type: (str, Any) -> None + def __init__(self, name: str, default: Optional[Any] = None) -> None: self._name = name self._default = default self._local = local() self._original_local = local() - def get(self, default=None): - # type: (Any) -> Any + def get(self, default: Optional[Any] = None) -> Any: return getattr(self._local, "value", default or self._default) - def set(self, value): - # type: (Any) -> Any + def set(self, value: Any) -> Any: token = str(random.getrandbits(64)) original_value = self.get() setattr(self._original_local, token, original_value) self._local.value = value return token - def reset(self, token): - # type: (Any) -> None + def reset(self, token: Any) -> None: self._local.value = getattr(self._original_local, token) # delete the original value (this way it works in Python 3.6+) del self._original_local.__dict__[token] @@ -1368,8 +1305,7 @@ def reset(self, token): return ContextVar -def _get_contextvars(): - # type: () -> Tuple[bool, type] +def _get_contextvars() -> Tuple[bool, type]: """ Figure out the "right" contextvars installation to use. Returns a `contextvars.ContextVar`-like class with a limited API. @@ -1377,27 +1313,13 @@ def _get_contextvars(): See https://docs.sentry.io/platforms/python/contextvars/ for more information. """ if not _is_contextvars_broken(): - # aiocontextvars is a PyPI package that ensures that the contextvars - # backport (also a PyPI package) works with asyncio under Python 3.6 - # - # Import it if available. - if sys.version_info < (3, 7): - # `aiocontextvars` is absolutely required for functional - # contextvars on Python 3.6. - try: - from aiocontextvars import ContextVar - - return True, ContextVar - except ImportError: - pass - else: - # On Python 3.7 contextvars are functional. - try: - from contextvars import ContextVar + # On Python 3.7+ contextvars are functional.
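The thread-local shim above imitates the stdlib `contextvars` API that the fallback below imports; for reference, the `get`/`set`/`reset` token protocol works like this (stdlib only, names illustrative):

```python
from contextvars import ContextVar

request_id: ContextVar[str] = ContextVar("request_id", default="-")

token = request_id.set("abc123")    # set() hands back a token...
assert request_id.get() == "abc123"
request_id.reset(token)             # ...which reset() uses to restore the prior value
assert request_id.get() == "-"
```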
+ try: + from contextvars import ContextVar - return True, ContextVar - except ImportError: - pass + return True, ContextVar + except ImportError: + pass # Fall back to basic thread-local usage. @@ -1418,10 +1340,9 @@ def _get_contextvars(): """ -def qualname_from_function(func): - # type: (Callable[..., Any]) -> Optional[str] +def qualname_from_function(func: Callable[..., Any]) -> Optional[str]: """Return the qualified name of func. Works with regular function, lambda, partial and partialmethod.""" - func_qualname = None # type: Optional[str] + func_qualname: Optional[str] = None # Python 2 try: @@ -1462,8 +1383,7 @@ def qualname_from_function(func): return func_qualname -def transaction_from_function(func): - # type: (Callable[..., Any]) -> Optional[str] +def transaction_from_function(func: Callable[..., Any]) -> Optional[str]: return qualname_from_function(func) @@ -1481,19 +1401,16 @@ class TimeoutThread(threading.Thread): waiting_time and raises a custom ServerlessTimeout exception. """ - def __init__(self, waiting_time, configured_timeout): - # type: (float, int) -> None + def __init__(self, waiting_time: float, configured_timeout: int) -> None: threading.Thread.__init__(self) self.waiting_time = waiting_time self.configured_timeout = configured_timeout self._stop_event = threading.Event() - def stop(self): - # type: () -> None + def stop(self) -> None: self._stop_event.set() - def run(self): - # type: () -> None + def run(self) -> None: self._stop_event.wait(self.waiting_time) @@ -1507,15 +1424,10 @@ def run(self): integer_configured_timeout = integer_configured_timeout + 1 # Raising Exception after timeout duration is reached - raise ServerlessTimeoutWarning( - "WARNING : Function is expected to get timed out. Configured timeout duration = {} seconds.".format( - integer_configured_timeout - ) - ) + raise ServerlessTimeoutWarning("WARNING: Function is about to time out.") -def to_base64(original): - # type: (str) -> Optional[str] +def to_base64(original: str) -> Optional[str]: """ Convert a string to base64, via UTF-8. Returns None on invalid input. """ @@ -1531,8 +1443,7 @@ def to_base64(original): return base64_string -def from_base64(base64_string): - # type: (str) -> Optional[str] +def from_base64(base64_string: str) -> Optional[str]: """ Convert a string from base64, via UTF-8. Returns None on invalid input. """ @@ -1556,8 +1467,12 @@ def from_base64(base64_string): Components = namedtuple("Components", ["scheme", "netloc", "path", "query", "fragment"]) -def sanitize_url(url, remove_authority=True, remove_query_values=True, split=False): - # type: (str, bool, bool, bool) -> Union[str, Components] +def sanitize_url( + url: str, + remove_authority: bool = True, + remove_query_values: bool = True, + split: bool = False, +) -> Union[str, Components]: """ Removes the authority and query parameter values from a given URL. """ @@ -1603,8 +1518,7 @@ def sanitize_url(url, remove_authority=True, remove_query_values=True, split=Fal ParsedUrl = namedtuple("ParsedUrl", ["url", "query", "fragment"]) -def parse_url(url, sanitize=True): - # type: (str, bool) -> ParsedUrl +def parse_url(url: str, sanitize: bool = True) -> ParsedUrl: """ Splits a URL into a url (including path), query and fragment. If sanitize is True, the query parameters will be sanitized to remove sensitive data. 
The authority (username and password) @@ -1631,11 +1545,11 @@ ) -def is_valid_sample_rate(rate, source): - # type: (Any, str) -> bool +def is_valid_sample_rate(rate: Any, source: str) -> Optional[float]: """ Checks the given sample rate to make sure it is a valid type and value (a boolean or a number between 0 and 1, inclusive). + Returns the final float value to use if valid. """ # both booleans and NaN are instances of Real, so a) checking for Real @@ -1647,7 +1561,7 @@ source=source, rate=rate, type=type(rate) ) ) - return False + return None # in case rate is a boolean, it will get cast to 1 if it's True and 0 if it's False rate = float(rate) @@ -1657,13 +1571,14 @@ source=source, rate=rate ) ) - return False + return None - return True + return rate -def match_regex_list(item, regex_list=None, substring_matching=False): - # type: (str, Optional[List[str]], bool) -> bool +def match_regex_list( + item: str, regex_list: Optional[List[str]] = None, substring_matching: bool = False +) -> bool: if regex_list is None: return False @@ -1678,8 +1593,7 @@ return False -def is_sentry_url(client, url): - # type: (sentry_sdk.client.BaseClient, str) -> bool +def is_sentry_url(client: sentry_sdk.client.BaseClient, url: str) -> bool: """ Determines whether the given URL matches the Sentry DSN. """ @@ -1691,8 +1605,7 @@ ) -def _generate_installed_modules(): - # type: () -> Iterator[Tuple[str, str]] +def _generate_installed_modules() -> Iterator[Tuple[str, str]]: try: from importlib import metadata @@ -1720,21 +1633,18 @@ yield _normalize_module_name(info.key), info.version -def _normalize_module_name(name): - # type: (str) -> str +def _normalize_module_name(name: str) -> str: return name.lower() -def _get_installed_modules(): - # type: () -> Dict[str, str] +def _get_installed_modules() -> Dict[str, str]: global _installed_modules if _installed_modules is None: _installed_modules = dict(_generate_installed_modules()) return _installed_modules -def package_version(package): - # type: (str) -> Optional[Tuple[int, ...]] +def package_version(package: str) -> Optional[Tuple[int, ...]]: installed_packages = _get_installed_modules() version = installed_packages.get(package) if version is None: @@ -1743,43 +1653,35 @@ return parse_version(version) -def reraise(tp, value, tb=None): - # type: (Optional[Type[BaseException]], Optional[BaseException], Optional[Any]) -> NoReturn +def reraise( + tp: Optional[Type[BaseException]], + value: Optional[BaseException], + tb: Optional[Any] = None, +) -> NoReturn: assert value is not None if value.__traceback__ is not tb: raise value.with_traceback(tb) raise value -def _no_op(*_a, **_k): - # type: (*Any, **Any) -> None - """No-op function for ensure_integration_enabled.""" - pass - - if TYPE_CHECKING: @overload def ensure_integration_enabled( - integration, # type: type[sentry_sdk.integrations.Integration] - original_function, # type: Callable[P, R] - ): - # type: (...) -> Callable[[Callable[P, R]], Callable[P, R]] - ... + integration: type[sentry_sdk.integrations.Integration], + original_function: Callable[P, R], + ) -> Callable[[Callable[P, R]], Callable[P, R]]: ... @overload def ensure_integration_enabled( - integration, # type: type[sentry_sdk.integrations.Integration] - ): - # type: (...)
-> Callable[[Callable[P, None]], Callable[P, None]] - ... + integration: type[sentry_sdk.integrations.Integration], + ) -> Callable[[Callable[P, None]], Callable[P, None]]: ... def ensure_integration_enabled( - integration, # type: type[sentry_sdk.integrations.Integration] - original_function=_no_op, # type: Union[Callable[P, R], Callable[P, None]] -): - # type: (...) -> Callable[[Callable[P, R]], Callable[P, R]] + integration: type[sentry_sdk.integrations.Integration], + original_function: Optional[Callable[P, R]] = None, +) -> Callable[[Callable[P, R]], Callable[P, Optional[R]]]: """ Ensures a given integration is enabled prior to calling a Sentry-patched function. @@ -1797,47 +1699,29 @@ def ensure_integration_enabled( ```python @ensure_integration_enabled(MyIntegration, my_function) def patch_my_function(): - with sentry_sdk.start_transaction(...): + with sentry_sdk.start_span(...): return my_function() ``` """ - if TYPE_CHECKING: - # Type hint to ensure the default function has the right typing. The overloads - # ensure the default _no_op function is only used when R is None. - original_function = cast(Callable[P, R], original_function) - - def patcher(sentry_patched_function): - # type: (Callable[P, R]) -> Callable[P, R] - def runner(*args: "P.args", **kwargs: "P.kwargs"): - # type: (...) -> R - if sentry_sdk.get_client().get_integration(integration) is None: - return original_function(*args, **kwargs) - return sentry_patched_function(*args, **kwargs) + def patcher(sentry_patched_function: Callable[P, R]) -> Callable[P, Optional[R]]: + def runner(*args: P.args, **kwargs: P.kwargs) -> Optional[R]: + if sentry_sdk.get_client().get_integration(integration) is not None: + return sentry_patched_function(*args, **kwargs) + elif original_function is not None: + return original_function(*args, **kwargs) + else: + return None - if original_function is _no_op: + if original_function: + return wraps(original_function)(runner) + else: return wraps(sentry_patched_function)(runner) - return wraps(original_function)(runner) - return patcher -if PY37: - - def nanosecond_time(): - # type: () -> int - return time.perf_counter_ns() - -else: - - def nanosecond_time(): - # type: () -> int - return int(time.perf_counter() * 1e9) - - -def now(): - # type: () -> float +def now() -> float: return time.perf_counter() @@ -1847,24 +1731,22 @@ def now(): except ImportError: # it's not great that the signatures are different, get_hub can't return None - # consider adding an if TYPE_CHECKING to change the signature to Optional[Hub] - def get_gevent_hub(): # type: ignore[misc] - # type: () -> Optional[Hub] + # consider adding an if TYPE_CHECKING to change the signature to Optional[GeventHub] + def get_gevent_hub() -> Optional[GeventHub]: # type: ignore[misc] return None - def is_module_patched(mod_name): - # type: (str) -> bool + def is_module_patched(mod_name: str) -> bool: # unable to import from gevent means no modules have been patched return False -def is_gevent(): - # type: () -> bool +def is_gevent() -> bool: return is_module_patched("threading") or is_module_patched("_thread") -def get_current_thread_meta(thread=None): - # type: (Optional[threading.Thread]) -> Tuple[Optional[int], Optional[str]] +def get_current_thread_meta( + thread: Optional[threading.Thread] = None, +) -> Tuple[Optional[int], Optional[str]]: """ Try to get the id of the current thread, with various fall backs. 
""" @@ -1914,8 +1796,55 @@ def get_current_thread_meta(thread=None): return None, None -def should_be_treated_as_error(ty, value): - # type: (Any, Any) -> bool +def _serialize_span_attribute(value: Any) -> Optional[AttributeValue]: + """Serialize an object so that it's OTel-compatible and displays nicely in Sentry.""" + # check for allowed primitives + if isinstance(value, (int, str, float, bool)): + return value + + # lists are allowed too, as long as they don't mix types + if isinstance(value, (list, tuple)): + for type_ in (int, str, float, bool): + if all(isinstance(item, type_) for item in value): + return list(value) + + # if this is anything else, just try to coerce to string + # we prefer json.dumps since this makes things like dictionaries display + # nicely in the UI + try: + return json.dumps(value) + except TypeError: + try: + return str(value) + except Exception: + return None + + +ISO_TZ_SEPARATORS = frozenset(("+", "-")) + + +def datetime_from_isoformat(value: str) -> datetime: + try: + result = datetime.fromisoformat(value) + except (AttributeError, ValueError): + # py 3.6 + timestamp_format = ( + "%Y-%m-%dT%H:%M:%S.%f" if "." in value else "%Y-%m-%dT%H:%M:%S" + ) + if value.endswith("Z"): + value = value[:-1] + "+0000" + + if value[-6] in ISO_TZ_SEPARATORS: + timestamp_format += "%z" + value = value[:-3] + value[-2:] + elif value[-5] in ISO_TZ_SEPARATORS: + timestamp_format += "%z" + + result = datetime.strptime(value, timestamp_format) + return result.astimezone(timezone.utc) + + +def should_be_treated_as_error(ty: Any, value: Any) -> bool: if ty == SystemExit and hasattr(value, "code") and value.code in (0, None): # https://docs.python.org/3/library/exceptions.html#SystemExit return False @@ -1923,29 +1852,31 @@ def should_be_treated_as_error(ty, value): return True -if TYPE_CHECKING: - T = TypeVar("T") +def http_client_status_to_breadcrumb_level(status_code: Optional[int]) -> str: + if status_code is not None: + if 500 <= status_code <= 599: + return "error" + elif 400 <= status_code <= 499: + return "warning" + return "info" -def try_convert(convert_func, value): - # type: (Callable[[Any], T], Any) -> Optional[T] - """ - Attempt to convert from an unknown type to a specific type, using the - given function. Return None if the conversion fails, i.e. if the function - raises an exception. 
- """ - try: - return convert_func(value) - except Exception: - return None + +def set_thread_info_from_span( + data: Dict[str, Any], span: sentry_sdk.tracing.Span +) -> None: + if span.get_attribute(SPANDATA.THREAD_ID) is not None: + data[SPANDATA.THREAD_ID] = span.get_attribute(SPANDATA.THREAD_ID) + if span.get_attribute(SPANDATA.THREAD_NAME) is not None: + data[SPANDATA.THREAD_NAME] = span.get_attribute(SPANDATA.THREAD_NAME) -def safe_serialize(data): - # type: (Any) -> str +def safe_serialize(data: Any) -> str: """Safely serialize to a readable string.""" - def serialize_item(item): - # type: (Any) -> Union[str, dict[Any, Any], list[Any], tuple[Any, ...]] + def serialize_item( + item: Any, + ) -> Union[str, dict[Any, Any], list[Any], tuple[Any, ...]]: if callable(item): try: module = getattr(item, "__module__", None) @@ -1984,24 +1915,3 @@ def serialize_item(item): return json.dumps(serialized, default=str) except Exception: return str(data) - - -def has_logs_enabled(options): - # type: (Optional[dict[str, Any]]) -> bool - if options is None: - return False - - return bool( - options.get("enable_logs", False) - or options["_experiments"].get("enable_logs", False) - ) - - -def get_before_send_log(options): - # type: (Optional[dict[str, Any]]) -> Optional[Callable[[Log, Hint], Optional[Log]]] - if options is None: - return None - - return options.get("before_send_log") or options["_experiments"].get( - "before_send_log" - ) diff --git a/sentry_sdk/worker.py b/sentry_sdk/worker.py index b04ea582bc..d911e15623 100644 --- a/sentry_sdk/worker.py +++ b/sentry_sdk/worker.py @@ -1,3 +1,4 @@ +from __future__ import annotations import os import threading @@ -9,38 +10,32 @@ from typing import TYPE_CHECKING if TYPE_CHECKING: - from typing import Any - from typing import Optional - from typing import Callable + from typing import Any, Optional, Callable _TERMINATOR = object() class BackgroundWorker: - def __init__(self, queue_size=DEFAULT_QUEUE_SIZE): - # type: (int) -> None - self._queue = Queue(queue_size) # type: Queue + def __init__(self, queue_size: int = DEFAULT_QUEUE_SIZE) -> None: + self._queue: Queue = Queue(queue_size) self._lock = threading.Lock() - self._thread = None # type: Optional[threading.Thread] - self._thread_for_pid = None # type: Optional[int] + self._thread: Optional[threading.Thread] = None + self._thread_for_pid: Optional[int] = None @property - def is_alive(self): - # type: () -> bool + def is_alive(self) -> bool: if self._thread_for_pid != os.getpid(): return False if not self._thread: return False return self._thread.is_alive() - def _ensure_thread(self): - # type: () -> None + def _ensure_thread(self) -> None: if not self.is_alive: self.start() - def _timed_queue_join(self, timeout): - # type: (float) -> bool + def _timed_queue_join(self, timeout: float) -> bool: deadline = time() + timeout queue = self._queue @@ -57,8 +52,7 @@ def _timed_queue_join(self, timeout): finally: queue.all_tasks_done.release() - def start(self): - # type: () -> None + def start(self) -> None: with self._lock: if not self.is_alive: self._thread = threading.Thread( @@ -74,8 +68,7 @@ def start(self): # send out events. self._thread = None - def kill(self): - # type: () -> None + def kill(self) -> None: """ Kill worker thread. Returns immediately. Not useful for waiting on shutdown for events, use `flush` for that. 
@@ -91,20 +84,17 @@ def kill(self): self._thread = None self._thread_for_pid = None - def flush(self, timeout, callback=None): - # type: (float, Optional[Any]) -> None + def flush(self, timeout: float, callback: Optional[Any] = None) -> None: logger.debug("background worker got flush request") with self._lock: if self.is_alive and timeout > 0.0: self._wait_flush(timeout, callback) logger.debug("background worker flushed") - def full(self): - # type: () -> bool + def full(self) -> bool: return self._queue.full() - def _wait_flush(self, timeout, callback): - # type: (float, Optional[Any]) -> None + def _wait_flush(self, timeout: float, callback: Optional[Any]) -> None: initial_timeout = min(0.1, timeout) if not self._timed_queue_join(initial_timeout): pending = self._queue.qsize() + 1 @@ -116,8 +106,7 @@ def _wait_flush(self, timeout, callback): pending = self._queue.qsize() + 1 logger.error("flush timed out, dropped %s events", pending) - def submit(self, callback): - # type: (Callable[[], None]) -> bool + def submit(self, callback: Callable[[], None]) -> bool: self._ensure_thread() try: self._queue.put_nowait(callback) @@ -125,8 +114,7 @@ def submit(self, callback): except FullError: return False - def _target(self): - # type: () -> None + def _target(self) -> None: while True: callback = self._queue.get() try: diff --git a/setup.py b/setup.py index 11b02cbca8..748b7ac86f 100644 --- a/setup.py +++ b/setup.py @@ -21,7 +21,7 @@ def get_file_text(file_name): setup( name="sentry-sdk", - version="2.34.1", + version="3.0.0a5", author="Sentry Team and Contributors", author_email="hello@sentry.io", url="https://github.com/getsentry/sentry-python", @@ -37,10 +37,11 @@ def get_file_text(file_name): package_data={"sentry_sdk": ["py.typed"]}, zip_safe=False, license="MIT", - python_requires=">=3.6", + python_requires=">=3.7", install_requires=[ "urllib3>=1.26.11", "certifi", + "opentelemetry-sdk>=1.4.0", ], extras_require={ "aiohttp": ["aiohttp>=3.5"], @@ -69,7 +70,6 @@ def get_file_text(file_name): "openai": ["openai>=1.0.0", "tiktoken>=0.3.0"], "openfeature": ["openfeature-sdk>=0.7.1"], "opentelemetry": ["opentelemetry-distro>=0.35b0"], - "opentelemetry-experimental": ["opentelemetry-distro"], "pure-eval": ["pure_eval", "executing", "asttokens"], "pymongo": ["pymongo>=3.1"], "pyspark": ["pyspark>=2.4.4"], @@ -85,8 +85,8 @@ def get_file_text(file_name): }, entry_points={ "opentelemetry_propagator": [ - "sentry=sentry_sdk.integrations.opentelemetry:SentryPropagator" - ] + "sentry=sentry_sdk.opentelemetry:SentryPropagator" + ], }, classifiers=[ "Development Status :: 5 - Production/Stable", @@ -96,7 +96,6 @@ def get_file_text(file_name): "Operating System :: OS Independent", "Programming Language :: Python", "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.6", "Programming Language :: Python :: 3.7", "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", diff --git a/tests/conftest.py b/tests/conftest.py index 01b1e9a81f..6312929e96 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -6,9 +6,14 @@ import gzip import io from threading import Thread -from contextlib import contextmanager +from opentelemetry import trace as otel_trace + +try: + from opentelemetry.util._once import Once +except ImportError: + Once = None + from http.server import BaseHTTPRequestHandler, HTTPServer -from unittest import mock from collections import namedtuple import pytest @@ -22,32 +27,20 @@ except ImportError: gevent = None -try: - import eventlet -except 
ImportError: - eventlet = None - import sentry_sdk import sentry_sdk.utils from sentry_sdk.envelope import Envelope, parse_json from sentry_sdk.integrations import ( # noqa: F401 - _DEFAULT_INTEGRATIONS, _installed_integrations, _processed_integrations, ) -from sentry_sdk.profiler import teardown_profiler +from sentry_sdk.profiler.transaction_profiler import teardown_profiler from sentry_sdk.profiler.continuous_profiler import teardown_continuous_profiler from sentry_sdk.transport import Transport from sentry_sdk.utils import reraise from tests import _warning_recorder, _warning_recorder_mgr -from typing import TYPE_CHECKING - -if TYPE_CHECKING: - from typing import Optional - from collections.abc import Iterator - SENTRY_EVENT_SCHEMA = "./checkouts/data-schemas/relay/event.schema.json" @@ -70,6 +63,10 @@ def benchmark(): from sentry_sdk import scope +from sentry_sdk.opentelemetry.scope import ( + setup_scope_context_management, + setup_initial_scopes, +) @pytest.fixture(autouse=True) @@ -81,6 +78,16 @@ def clean_scopes(): scope._isolation_scope.set(None) scope._current_scope.set(None) + setup_initial_scopes() + + +@pytest.fixture(autouse=True) +def clear_tracer_provider(): + """Reset TracerProvider so that we can set it up from scratch.""" + if Once is not None: + otel_trace._TRACER_PROVIDER_SET_ONCE = Once() + otel_trace._TRACER_PROVIDER = None + @pytest.fixture(autouse=True) def internal_exceptions(request): @@ -180,13 +187,8 @@ def reset_integrations(): with a clean slate to ensure monkeypatching works well, but this also means some other stuff will be monkeypatched twice. """ - global _DEFAULT_INTEGRATIONS, _processed_integrations - try: - _DEFAULT_INTEGRATIONS.remove( - "sentry_sdk.integrations.opentelemetry.integration.OpenTelemetryIntegration" - ) - except ValueError: - pass + global _installed_integrations, _processed_integrations + _processed_integrations.clear() _installed_integrations.clear() @@ -205,6 +207,7 @@ def inner(identifier): @pytest.fixture def sentry_init(request): def inner(*a, **kw): + setup_scope_context_management() kw.setdefault("transport", TestTransport()) client = sentry_sdk.Client(*a, **kw) sentry_sdk.get_global_scope().set_client(client) @@ -333,23 +336,11 @@ def read_flush(self): # scope=session ensures that fixture is run earlier @pytest.fixture( scope="session", - params=[None, "eventlet", "gevent"], - ids=("threads", "eventlet", "greenlet"), + params=[None, "gevent"], + ids=("threads", "greenlet"), ) def maybe_monkeypatched_threading(request): - if request.param == "eventlet": - if eventlet is None: - pytest.skip("no eventlet installed") - - try: - eventlet.monkey_patch() - except AttributeError as e: - if "'thread.RLock' object has no attribute" in str(e): - # https://bitbucket.org/pypy/pypy/issues/2962/gevent-cannot-patch-rlock-under-pypy-27-7 - pytest.skip("https://github.com/eventlet/eventlet/issues/546") - else: - raise - elif request.param == "gevent": + if request.param == "gevent": if gevent is None: pytest.skip("no gevent installed") try: @@ -642,23 +633,6 @@ def werkzeug_set_cookie(client, servername, key, value): client.set_cookie(key, value) -@contextmanager -def patch_start_tracing_child(fake_transaction_is_none=False): - # type: (bool) -> Iterator[Optional[mock.MagicMock]] - if not fake_transaction_is_none: - fake_transaction = mock.MagicMock() - fake_start_child = mock.MagicMock() - fake_transaction.start_child = fake_start_child - else: - fake_transaction = None - fake_start_child = None - - with mock.patch( - 
"sentry_sdk.tracing_utils.get_current_span", return_value=fake_transaction - ): - yield fake_start_child - - class ApproxDict(dict): def __eq__(self, other): # For an ApproxDict to equal another dict, the other dict just needs to contain @@ -671,6 +645,17 @@ def __ne__(self, other): return not self.__eq__(other) +class SortedBaggage: + def __init__(self, baggage): + self.baggage = baggage + + def __eq__(self, other): + return sorted(self.baggage.split(",")) == sorted(other.split(",")) + + def __ne__(self, other): + return not self.__eq__(other) + + CapturedData = namedtuple("CapturedData", ["path", "event", "envelope", "compressed"]) diff --git a/tests/integrations/aiohttp/test_aiohttp.py b/tests/integrations/aiohttp/test_aiohttp.py index dbb4286370..6cd625a4b5 100644 --- a/tests/integrations/aiohttp/test_aiohttp.py +++ b/tests/integrations/aiohttp/test_aiohttp.py @@ -1,6 +1,6 @@ import asyncio import json - +import re from contextlib import suppress from unittest import mock @@ -8,7 +8,6 @@ from aiohttp import web from aiohttp.client import ServerDisconnectedError -from aiohttp.web_request import Request from aiohttp.web_exceptions import ( HTTPInternalServerError, HTTPNetworkAuthenticationRequired, @@ -17,7 +16,7 @@ HTTPUnavailableForLegalReasons, ) -from sentry_sdk import capture_message, start_transaction +from sentry_sdk import capture_message, start_span from sentry_sdk.integrations.aiohttp import AioHttpIntegration from tests.conftest import ApproxDict @@ -293,13 +292,12 @@ async def hello(request): @pytest.mark.asyncio -async def test_traces_sampler_gets_request_object_in_sampling_context( +async def test_traces_sampler_gets_attributes_in_sampling_context( sentry_init, aiohttp_client, - DictionaryContaining, # noqa: N803 - ObjectDescribedBy, # noqa: N803 ): - traces_sampler = mock.Mock() + traces_sampler = mock.Mock(return_value=True) + sentry_init( integrations=[AioHttpIntegration()], traces_sampler=traces_sampler, @@ -312,17 +310,24 @@ async def kangaroo_handler(request): app.router.add_get("/tricks/kangaroo", kangaroo_handler) client = await aiohttp_client(app) - await client.get("/tricks/kangaroo") + await client.get( + "/tricks/kangaroo?jump=high", headers={"Custom-Header": "Custom Value"} + ) - traces_sampler.assert_any_call( - DictionaryContaining( - { - "aiohttp_request": ObjectDescribedBy( - type=Request, attrs={"method": "GET", "path": "/tricks/kangaroo"} - ) - } - ) + assert traces_sampler.call_count == 1 + sampling_context = traces_sampler.call_args_list[0][0][0] + assert isinstance(sampling_context, dict) + assert re.match( + r"http:\/\/127\.0\.0\.1:[0-9]{4,5}\/tricks\/kangaroo\?jump=high", + sampling_context["url.full"], ) + assert sampling_context["url.path"] == "/tricks/kangaroo" + assert sampling_context["url.query"] == "jump=high" + assert sampling_context["url.scheme"] == "http" + assert sampling_context["http.request.method"] == "GET" + assert sampling_context["server.address"] == "127.0.0.1" + assert sampling_context["server.port"].isnumeric() + assert sampling_context["http.request.header.custom-header"] == "Custom Value" @pytest.mark.asyncio @@ -413,7 +418,7 @@ async def hello(request): # The aiohttp_client is instrumented so will generate the sentry-trace header and add request. # Get the sentry-trace header from the request so we can later compare with transaction events. 
client = await aiohttp_client(app) - with start_transaction(): + with start_span(name="request"): # Headers are only added to the span if there is an active transaction resp = await client.get("/") @@ -492,7 +497,7 @@ async def handler(request): raw_server = await aiohttp_raw_server(handler) - with start_transaction(): + with start_span(name="breadcrumb"): events = capture_events() client = await aiohttp_client(raw_server) @@ -521,8 +526,8 @@ async def handler(request): @pytest.mark.parametrize( "status_code,level", [ - (200, None), - (301, None), + (200, "info"), + (301, "info"), (403, "warning"), (405, "warning"), (500, "error"), @@ -544,7 +549,7 @@ async def handler(request): raw_server = await aiohttp_raw_server(handler) - with start_transaction(): + with start_span(name="crumbs"): events = capture_events() client = await aiohttp_client(raw_server) @@ -556,10 +561,7 @@ async def handler(request): crumb = event["breadcrumbs"]["values"][0] assert crumb["type"] == "http" - if level is None: - assert "level" not in crumb - else: - assert crumb["level"] == level + assert crumb["level"] == level assert crumb["category"] == "httplib" assert crumb["data"] == ApproxDict( { @@ -573,34 +575,39 @@ async def handler(request): @pytest.mark.asyncio -async def test_outgoing_trace_headers(sentry_init, aiohttp_raw_server, aiohttp_client): +async def test_outgoing_trace_headers( + sentry_init, aiohttp_raw_server, aiohttp_client, capture_envelopes +): sentry_init( integrations=[AioHttpIntegration()], traces_sample_rate=1.0, ) + envelopes = capture_envelopes() + async def handler(request): return web.Response(text="OK") raw_server = await aiohttp_raw_server(handler) - with start_transaction( + with start_span( name="/interactions/other-dogs/new-dog", op="greeting.sniff", - # make trace_id difference between transactions - trace_id="0123456789012345678901234567890", ) as transaction: client = await aiohttp_client(raw_server) resp = await client.get("/") - request_span = transaction._span_recorder.spans[-1] - - assert resp.request_info.headers[ - "sentry-trace" - ] == "{trace_id}-{parent_span_id}-{sampled}".format( - trace_id=transaction.trace_id, - parent_span_id=request_span.span_id, - sampled=1, - ) + + (envelope,) = envelopes + transaction = envelope.get_transaction_event() + request_span = transaction["spans"][-1] + + assert resp.request_info.headers[ + "sentry-trace" + ] == "{trace_id}-{parent_span_id}-{sampled}".format( + trace_id=transaction["contexts"]["trace"]["trace_id"], + parent_span_id=request_span["span_id"], + sampled=1, + ) @pytest.mark.asyncio @@ -619,17 +626,24 @@ async def handler(request): raw_server = await aiohttp_raw_server(handler) with mock.patch("sentry_sdk.tracing_utils.Random.uniform", return_value=0.5): - with start_transaction( + with start_span( name="/interactions/other-dogs/new-dog", op="greeting.sniff", - trace_id="0123456789012345678901234567890", - ): + ) as transaction: client = await aiohttp_client(raw_server) resp = await client.get("/", headers={"bagGage": "custom=value"}) - assert ( - resp.request_info.headers["baggage"] - == "custom=value,sentry-trace_id=0123456789012345678901234567890,sentry-sample_rand=0.500000,sentry-environment=production,sentry-release=d08ebdb9309e1b004c6f52202de58a09c2268e42,sentry-transaction=/interactions/other-dogs/new-dog,sentry-sample_rate=1.0,sentry-sampled=true" + assert sorted(resp.request_info.headers["baggage"].split(",")) == sorted( + [ + "custom=value", + f"sentry-trace_id={transaction.trace_id}", + "sentry-environment=production", 
+ "sentry-release=d08ebdb9309e1b004c6f52202de58a09c2268e42", + "sentry-transaction=/interactions/other-dogs/new-dog", + "sentry-sample_rate=1.0", + "sentry-sampled=true", + "sentry-sample_rand=0.500000", + ] ) diff --git a/tests/integrations/anthropic/test_anthropic.py b/tests/integrations/anthropic/test_anthropic.py index e6e1a40aa9..b238f2d09f 100644 --- a/tests/integrations/anthropic/test_anthropic.py +++ b/tests/integrations/anthropic/test_anthropic.py @@ -21,7 +21,7 @@ async def __call__(self, *args, **kwargs): from anthropic.types.message_start_event import MessageStartEvent from sentry_sdk.integrations.anthropic import _add_ai_data_to_span, _collect_ai_data -from sentry_sdk.utils import package_version +from sentry_sdk.utils import _serialize_span_attribute, package_version try: from anthropic.types import InputJSONDelta @@ -44,7 +44,7 @@ async def __call__(self, *args, **kwargs): except ImportError: from anthropic.types.content_block import ContentBlock as TextBlock -from sentry_sdk import start_transaction, start_span +from sentry_sdk import start_span from sentry_sdk.consts import OP, SPANDATA from sentry_sdk.integrations.anthropic import AnthropicIntegration @@ -92,7 +92,7 @@ def test_nonstreaming_create_message( } ] - with start_transaction(name="anthropic"): + with start_span(name="anthropic"): response = client.messages.create( max_tokens=1024, messages=messages, model="model" ) @@ -117,10 +117,12 @@ def test_nonstreaming_create_message( assert span["data"][SPANDATA.AI_MODEL_ID] == "model" if send_default_pii and include_prompts: - assert span["data"][SPANDATA.AI_INPUT_MESSAGES] == messages - assert span["data"][SPANDATA.AI_RESPONSES] == [ - {"type": "text", "text": "Hi, I'm Claude."} - ] + assert span["data"][SPANDATA.AI_INPUT_MESSAGES] == _serialize_span_attribute( + messages + ) + assert span["data"][SPANDATA.AI_RESPONSES] == _serialize_span_attribute( + [{"type": "text", "text": "Hi, I'm Claude."}] + ) else: assert SPANDATA.AI_INPUT_MESSAGES not in span["data"] assert SPANDATA.AI_RESPONSES not in span["data"] @@ -128,7 +130,7 @@ def test_nonstreaming_create_message( assert span["data"]["gen_ai.usage.input_tokens"] == 10 assert span["data"]["gen_ai.usage.output_tokens"] == 20 assert span["data"]["gen_ai.usage.total_tokens"] == 30 - assert span["data"][SPANDATA.AI_STREAMING] is False + assert span["data"]["ai.streaming"] is False @pytest.mark.asyncio @@ -160,7 +162,7 @@ async def test_nonstreaming_create_message_async( } ] - with start_transaction(name="anthropic"): + with start_span(name="anthropic"): response = await client.messages.create( max_tokens=1024, messages=messages, model="model" ) @@ -185,10 +187,12 @@ async def test_nonstreaming_create_message_async( assert span["data"][SPANDATA.AI_MODEL_ID] == "model" if send_default_pii and include_prompts: - assert span["data"][SPANDATA.AI_INPUT_MESSAGES] == messages - assert span["data"][SPANDATA.AI_RESPONSES] == [ - {"type": "text", "text": "Hi, I'm Claude."} - ] + assert span["data"][SPANDATA.AI_INPUT_MESSAGES] == _serialize_span_attribute( + messages + ) + assert span["data"][SPANDATA.AI_RESPONSES] == _serialize_span_attribute( + [{"type": "text", "text": "Hi, I'm Claude."}] + ) else: assert SPANDATA.AI_INPUT_MESSAGES not in span["data"] assert SPANDATA.AI_RESPONSES not in span["data"] @@ -196,7 +200,7 @@ async def test_nonstreaming_create_message_async( assert span["data"]["gen_ai.usage.input_tokens"] == 10 assert span["data"]["gen_ai.usage.output_tokens"] == 20 assert span["data"]["gen_ai.usage.total_tokens"] == 30 - 
assert span["data"][SPANDATA.AI_STREAMING] is False + assert span["data"]["ai.streaming"] is False @pytest.mark.parametrize( @@ -261,7 +265,7 @@ def test_streaming_create_message( } ] - with start_transaction(name="anthropic"): + with start_span(name="anthropic"): message = client.messages.create( max_tokens=1024, messages=messages, model="model", stream=True ) @@ -284,10 +288,12 @@ def test_streaming_create_message( assert span["data"][SPANDATA.AI_MODEL_ID] == "model" if send_default_pii and include_prompts: - assert span["data"][SPANDATA.AI_INPUT_MESSAGES] == messages - assert span["data"][SPANDATA.AI_RESPONSES] == [ - {"type": "text", "text": "Hi! I'm Claude!"} - ] + assert span["data"][SPANDATA.AI_INPUT_MESSAGES] == _serialize_span_attribute( + messages + ) + assert span["data"][SPANDATA.AI_RESPONSES] == _serialize_span_attribute( + [{"type": "text", "text": "Hi! I'm Claude!"}] + ) else: assert SPANDATA.AI_INPUT_MESSAGES not in span["data"] @@ -296,7 +302,7 @@ def test_streaming_create_message( assert span["data"]["gen_ai.usage.input_tokens"] == 10 assert span["data"]["gen_ai.usage.output_tokens"] == 30 assert span["data"]["gen_ai.usage.total_tokens"] == 40 - assert span["data"][SPANDATA.AI_STREAMING] is True + assert span["data"]["ai.streaming"] is True @pytest.mark.asyncio @@ -364,7 +370,7 @@ async def test_streaming_create_message_async( } ] - with start_transaction(name="anthropic"): + with start_span(name="anthropic"): message = await client.messages.create( max_tokens=1024, messages=messages, model="model", stream=True ) @@ -387,10 +393,12 @@ async def test_streaming_create_message_async( assert span["data"][SPANDATA.AI_MODEL_ID] == "model" if send_default_pii and include_prompts: - assert span["data"][SPANDATA.AI_INPUT_MESSAGES] == messages - assert span["data"][SPANDATA.AI_RESPONSES] == [ - {"type": "text", "text": "Hi! I'm Claude!"} - ] + assert span["data"][SPANDATA.AI_INPUT_MESSAGES] == _serialize_span_attribute( + messages + ) + assert span["data"][SPANDATA.AI_RESPONSES] == _serialize_span_attribute( + [{"type": "text", "text": "Hi! 
I'm Claude!"}] + ) else: assert SPANDATA.AI_INPUT_MESSAGES not in span["data"] @@ -399,7 +407,7 @@ async def test_streaming_create_message_async( assert span["data"]["gen_ai.usage.input_tokens"] == 10 assert span["data"]["gen_ai.usage.output_tokens"] == 30 assert span["data"]["gen_ai.usage.total_tokens"] == 40 - assert span["data"][SPANDATA.AI_STREAMING] is True + assert span["data"]["ai.streaming"] is True @pytest.mark.skipif( @@ -494,7 +502,7 @@ def test_streaming_create_message_with_input_json_delta( } ] - with start_transaction(name="anthropic"): + with start_span(name="anthropic"): message = client.messages.create( max_tokens=1024, messages=messages, model="model", stream=True ) @@ -517,10 +525,13 @@ def test_streaming_create_message_with_input_json_delta( assert span["data"][SPANDATA.AI_MODEL_ID] == "model" if send_default_pii and include_prompts: - assert span["data"][SPANDATA.AI_INPUT_MESSAGES] == messages - assert span["data"][SPANDATA.AI_RESPONSES] == [ - {"text": "{'location': 'San Francisco, CA'}", "type": "text"} - ] + assert span["data"][SPANDATA.AI_INPUT_MESSAGES] == _serialize_span_attribute( + messages + ) + assert span["data"][SPANDATA.AI_RESPONSES] == _serialize_span_attribute( + [{"type": "text", "text": "{'location': 'San Francisco, CA'}"}] + ) # we do not record InputJSONDelta because it could contain PII + else: assert SPANDATA.AI_INPUT_MESSAGES not in span["data"] assert SPANDATA.AI_RESPONSES not in span["data"] @@ -528,7 +539,7 @@ def test_streaming_create_message_with_input_json_delta( assert span["data"]["gen_ai.usage.input_tokens"] == 366 assert span["data"]["gen_ai.usage.output_tokens"] == 51 assert span["data"]["gen_ai.usage.total_tokens"] == 417 - assert span["data"][SPANDATA.AI_STREAMING] is True + assert span["data"]["ai.streaming"] is True @pytest.mark.asyncio @@ -630,7 +641,7 @@ async def test_streaming_create_message_with_input_json_delta_async( } ] - with start_transaction(name="anthropic"): + with start_span(name="anthropic"): message = await client.messages.create( max_tokens=1024, messages=messages, model="model", stream=True ) @@ -653,10 +664,12 @@ async def test_streaming_create_message_with_input_json_delta_async( assert span["data"][SPANDATA.AI_MODEL_ID] == "model" if send_default_pii and include_prompts: - assert span["data"][SPANDATA.AI_INPUT_MESSAGES] == messages - assert span["data"][SPANDATA.AI_RESPONSES] == [ - {"text": "{'location': 'San Francisco, CA'}", "type": "text"} - ] + assert span["data"][SPANDATA.AI_INPUT_MESSAGES] == _serialize_span_attribute( + messages + ) + assert span["data"][SPANDATA.AI_RESPONSES] == _serialize_span_attribute( + [{"type": "text", "text": "{'location': 'San Francisco, CA'}"}] + ) # we do not record InputJSONDelta because it could contain PII else: assert SPANDATA.AI_INPUT_MESSAGES not in span["data"] @@ -665,9 +678,10 @@ async def test_streaming_create_message_with_input_json_delta_async( assert span["data"]["gen_ai.usage.input_tokens"] == 366 assert span["data"]["gen_ai.usage.output_tokens"] == 51 assert span["data"]["gen_ai.usage.total_tokens"] == 417 - assert span["data"][SPANDATA.AI_STREAMING] is True + assert span["data"]["ai.streaming"] is True +@pytest.mark.forked def test_exception_message_create(sentry_init, capture_events): sentry_init(integrations=[AnthropicIntegration()], traces_sample_rate=1.0) events = capture_events() @@ -724,7 +738,7 @@ def test_span_origin(sentry_init, capture_events): } ] - with start_transaction(name="anthropic"): + with start_span(name="anthropic"): 
client.messages.create(max_tokens=1024, messages=messages, model="model") (event,) = events @@ -751,7 +765,7 @@ async def test_span_origin_async(sentry_init, capture_events): } ] - with start_transaction(name="anthropic"): + with start_span(name="anthropic"): await client.messages.create(max_tokens=1024, messages=messages, model="model") (event,) = events @@ -788,29 +802,35 @@ def test_collect_ai_data_with_input_json_delta(): ANTHROPIC_VERSION < (0, 27), reason="Versions <0.27.0 do not include InputJSONDelta.", ) -def test_add_ai_data_to_span_with_input_json_delta(sentry_init): +def test_add_ai_data_to_span_with_input_json_delta(sentry_init, capture_events): sentry_init( integrations=[AnthropicIntegration(include_prompts=True)], traces_sample_rate=1.0, send_default_pii=True, ) + events = capture_events() - with start_transaction(name="test"): - span = start_span() - integration = AnthropicIntegration() + with start_span(name="test"): + with start_span(name="anthropic") as span: + integration = AnthropicIntegration() - _add_ai_data_to_span( - span, - integration, - input_tokens=10, - output_tokens=20, - content_blocks=["{'test': 'data',", "'more': 'json'}"], - ) + _add_ai_data_to_span( + span, + integration, + input_tokens=10, + output_tokens=20, + content_blocks=["{'test': 'data',", "'more': 'json'}"], + ) - assert span._data.get("ai.responses") == [ - {"type": "text", "text": "{'test': 'data','more': 'json'}"} - ] - assert span._data.get("ai.streaming") is True - assert span._data.get("gen_ai.usage.input_tokens") == 10 - assert span._data.get("gen_ai.usage.output_tokens") == 20 - assert span._data.get("gen_ai.usage.total_tokens") == 30 + (event,) = events + + assert len(event["spans"]) == 1 + (span,) = event["spans"] + + assert span["data"]["ai.responses"] == _serialize_span_attribute( + [{"type": "text", "text": "{'test': 'data','more': 'json'}"}] + ) + assert span["data"]["ai.streaming"] is True + assert span["data"]["gen_ai.usage.input_tokens"] == 10 + assert span["data"]["gen_ai.usage.output_tokens"] == 20 + assert span["data"]["gen_ai.usage.total_tokens"] == 30 diff --git a/tests/integrations/ariadne/test_ariadne.py b/tests/integrations/ariadne/test_ariadne.py index 2c3b086aa5..6637a88451 100644 --- a/tests/integrations/ariadne/test_ariadne.py +++ b/tests/integrations/ariadne/test_ariadne.py @@ -68,7 +68,9 @@ def test_capture_request_and_response_if_send_pii_is_on_async( assert len(events) == 1 (event,) = events - assert event["exception"]["values"][0]["mechanism"]["type"] == "ariadne" + assert len(event["exception"]["values"]) == 2 + assert event["exception"]["values"][0]["mechanism"]["type"] == "chained" + assert event["exception"]["values"][-1]["mechanism"]["type"] == "ariadne" assert event["contexts"]["response"] == { "data": { "data": {"error": None}, @@ -111,7 +113,10 @@ def graphql_server(): assert len(events) == 1 (event,) = events - assert event["exception"]["values"][0]["mechanism"]["type"] == "ariadne" + assert len(event["exception"]["values"]) == 2 + assert event["exception"]["values"][0]["mechanism"]["type"] == "chained" + assert event["exception"]["values"][-1]["mechanism"]["type"] == "ariadne" + assert event["contexts"]["response"] == { "data": { "data": {"error": None}, @@ -152,7 +157,10 @@ def test_do_not_capture_request_and_response_if_send_pii_is_off_async( assert len(events) == 1 (event,) = events - assert event["exception"]["values"][0]["mechanism"]["type"] == "ariadne" + assert len(event["exception"]["values"]) == 2 + assert 
event["exception"]["values"][0]["mechanism"]["type"] == "chained" + assert event["exception"]["values"][-1]["mechanism"]["type"] == "ariadne" + assert "data" not in event["request"] assert "response" not in event["contexts"] @@ -182,7 +190,9 @@ def graphql_server(): assert len(events) == 1 (event,) = events - assert event["exception"]["values"][0]["mechanism"]["type"] == "ariadne" + assert len(event["exception"]["values"]) == 2 + assert event["exception"]["values"][0]["mechanism"]["type"] == "chained" + assert event["exception"]["values"][-1]["mechanism"]["type"] == "ariadne" assert "data" not in event["request"] assert "response" not in event["contexts"] diff --git a/tests/integrations/arq/test_arq.py b/tests/integrations/arq/test_arq.py index d8b7e715f2..ce3d624f1e 100644 --- a/tests/integrations/arq/test_arq.py +++ b/tests/integrations/arq/test_arq.py @@ -3,7 +3,7 @@ import pytest -from sentry_sdk import get_client, start_transaction +from sentry_sdk import get_client, start_span from sentry_sdk.integrations.arq import ArqIntegration import arq.worker @@ -294,7 +294,7 @@ async def dummy_job(_): events = capture_events() - with start_transaction() as transaction: + with start_span(name="test") as transaction: await pool.enqueue_job("dummy_job") (event,) = events @@ -345,7 +345,7 @@ async def dummy_job(_): events = capture_events() - with start_transaction(): + with start_span(name="job"): await pool.enqueue_job("dummy_job") (event,) = events diff --git a/tests/integrations/asgi/test_asgi.py b/tests/integrations/asgi/test_asgi.py index ec2796c140..9e97ae3651 100644 --- a/tests/integrations/asgi/test_asgi.py +++ b/tests/integrations/asgi/test_asgi.py @@ -720,3 +720,26 @@ async def test_custom_transaction_name( assert transaction_event["type"] == "transaction" assert transaction_event["transaction"] == "foobar" assert transaction_event["transaction_info"] == {"source": "custom"} + + +@pytest.mark.asyncio +async def test_asgi_scope_in_traces_sampler(sentry_init, asgi3_app): + def dummy_traces_sampler(sampling_context): + assert sampling_context["url.path"] == "/test" + assert sampling_context["url.scheme"] == "http" + assert sampling_context["url.query"] == "hello=there" + assert sampling_context["url.full"] == "/test?hello=there" + assert sampling_context["http.request.method"] == "GET" + assert sampling_context["network.protocol.version"] == "1.1" + assert sampling_context["network.protocol.name"] == "http" + assert sampling_context["http.request.header.custom-header"] == "Custom Value" + + sentry_init( + traces_sampler=dummy_traces_sampler, + traces_sample_rate=1.0, + ) + + app = SentryAsgiMiddleware(asgi3_app) + + async with TestClient(app) as client: + await client.get("/test?hello=there", headers={"Custom-Header": "Custom Value"}) diff --git a/tests/integrations/asyncio/test_asyncio.py b/tests/integrations/asyncio/test_asyncio.py index fb75bfc69b..2ae71f8f43 100644 --- a/tests/integrations/asyncio/test_asyncio.py +++ b/tests/integrations/asyncio/test_asyncio.py @@ -65,7 +65,7 @@ async def test_create_task( events = capture_events() - with sentry_sdk.start_transaction(name="test_transaction_for_create_task"): + with sentry_sdk.start_span(name="test_transaction_for_create_task"): with sentry_sdk.start_span(op="root", name="not so important"): tasks = [asyncio.create_task(foo()), asyncio.create_task(bar())] await asyncio.wait(tasks, return_when=asyncio.FIRST_EXCEPTION) @@ -108,7 +108,7 @@ async def test_gather( events = capture_events() - with 
sentry_sdk.start_transaction(name="test_transaction_for_gather"): + with sentry_sdk.start_span(name="test_transaction_for_gather"): with sentry_sdk.start_span(op="root", name="not so important"): await asyncio.gather(foo(), bar(), return_exceptions=True) @@ -150,7 +150,8 @@ async def test_exception( events = capture_events() - with sentry_sdk.start_transaction(name="test_exception"): + with sentry_sdk.start_span(name="test_exception"): + sentry_sdk.get_isolation_scope().set_transaction_name("test_exception") with sentry_sdk.start_span(op="root", name="not so important"): tasks = [asyncio.create_task(boom()), asyncio.create_task(bar())] await asyncio.wait(tasks, return_when=asyncio.FIRST_EXCEPTION) @@ -364,7 +365,7 @@ async def test_span_origin( events = capture_events() - with sentry_sdk.start_transaction(name="something"): + with sentry_sdk.start_span(name="something"): tasks = [ asyncio.create_task(foo()), ] diff --git a/tests/integrations/asyncpg/test_asyncpg.py b/tests/integrations/asyncpg/test_asyncpg.py index e36d15c5d2..579052da27 100644 --- a/tests/integrations/asyncpg/test_asyncpg.py +++ b/tests/integrations/asyncpg/test_asyncpg.py @@ -10,14 +10,6 @@ """ import os - - -PG_HOST = os.getenv("SENTRY_PYTHON_TEST_POSTGRES_HOST", "localhost") -PG_PORT = int(os.getenv("SENTRY_PYTHON_TEST_POSTGRES_PORT", "5432")) -PG_USER = os.getenv("SENTRY_PYTHON_TEST_POSTGRES_USER", "postgres") -PG_PASSWORD = os.getenv("SENTRY_PYTHON_TEST_POSTGRES_PASSWORD", "sentry") -PG_NAME = os.getenv("SENTRY_PYTHON_TEST_POSTGRES_NAME", "postgres") - import datetime from contextlib import contextmanager from unittest import mock @@ -26,17 +18,25 @@ import pytest import pytest_asyncio from asyncpg import connect, Connection +from freezegun import freeze_time -from sentry_sdk import capture_message, start_transaction +from sentry_sdk import capture_message, start_span from sentry_sdk.integrations.asyncpg import AsyncPGIntegration from sentry_sdk.consts import SPANDATA from sentry_sdk.tracing_utils import record_sql_queries from tests.conftest import ApproxDict +PG_HOST = os.getenv("SENTRY_PYTHON_TEST_POSTGRES_HOST", "localhost") +PG_PORT = int(os.getenv("SENTRY_PYTHON_TEST_POSTGRES_PORT", "5432")) +PG_USER = os.getenv("SENTRY_PYTHON_TEST_POSTGRES_USER", "postgres") +PG_PASSWORD = os.getenv("SENTRY_PYTHON_TEST_POSTGRES_PASSWORD", "sentry") +PG_NAME = os.getenv("SENTRY_PYTHON_TEST_POSTGRES_NAME", "postgres") + PG_CONNECTION_URI = "postgresql://{}:{}@{}/{}".format( PG_USER, PG_PASSWORD, PG_HOST, PG_NAME ) + CRUMBS_CONNECT = { "category": "query", "data": ApproxDict( @@ -84,7 +84,7 @@ async def test_connect(sentry_init, capture_events) -> None: capture_message("hi") - (event,) = events + event = events[-1] for crumb in event["breadcrumbs"]["values"]: del crumb["timestamp"] @@ -123,7 +123,7 @@ async def test_execute(sentry_init, capture_events) -> None: capture_message("hi") - (event,) = events + event = events[-1] for crumb in event["breadcrumbs"]["values"]: del crumb["timestamp"] @@ -179,7 +179,7 @@ async def test_execute_many(sentry_init, capture_events) -> None: capture_message("hi") - (event,) = events + event = events[-1] for crumb in event["breadcrumbs"]["values"]: del crumb["timestamp"] @@ -216,7 +216,7 @@ async def test_record_params(sentry_init, capture_events) -> None: capture_message("hi") - (event,) = events + event = events[-1] for crumb in event["breadcrumbs"]["values"]: del crumb["timestamp"] @@ -259,13 +259,13 @@ async def test_cursor(sentry_init, capture_events) -> None: async for record in conn.cursor( 
"SELECT * FROM users WHERE dob > $1", datetime.date(1970, 1, 1) ): - print(record) + pass await conn.close() capture_message("hi") - (event,) = events + event = events[-1] for crumb in event["breadcrumbs"]["values"]: del crumb["timestamp"] @@ -278,14 +278,24 @@ async def test_cursor(sentry_init, capture_events) -> None: "message": "INSERT INTO users(name, password, dob) VALUES($1, $2, $3)", "type": "default", }, - {"category": "query", "data": {}, "message": "BEGIN;", "type": "default"}, + { + "category": "query", + "data": {}, + "message": "BEGIN;", + "type": "default", + }, { "category": "query", "data": {}, "message": "SELECT * FROM users WHERE dob > $1", "type": "default", }, - {"category": "query", "data": {}, "message": "COMMIT;", "type": "default"}, + { + "category": "query", + "data": {}, + "message": "COMMIT;", + "type": "default", + }, ] @@ -306,24 +316,22 @@ async def test_cursor_manual(sentry_init, capture_events) -> None: ("Alice", "pw", datetime.date(1990, 12, 25)), ], ) - # + async with conn.transaction(): # Postgres requires non-scrollable cursors to be created # and used in a transaction. cur = await conn.cursor( "SELECT * FROM users WHERE dob > $1", datetime.date(1970, 1, 1) ) - record = await cur.fetchrow() - print(record) + await cur.fetchrow() while await cur.forward(1): - record = await cur.fetchrow() - print(record) + await cur.fetchrow() await conn.close() capture_message("hi") - (event,) = events + event = events[-1] for crumb in event["breadcrumbs"]["values"]: del crumb["timestamp"] @@ -336,14 +344,24 @@ async def test_cursor_manual(sentry_init, capture_events) -> None: "message": "INSERT INTO users(name, password, dob) VALUES($1, $2, $3)", "type": "default", }, - {"category": "query", "data": {}, "message": "BEGIN;", "type": "default"}, + { + "category": "query", + "data": {}, + "message": "BEGIN;", + "type": "default", + }, { "category": "query", "data": {}, "message": "SELECT * FROM users WHERE dob > $1", "type": "default", }, - {"category": "query", "data": {}, "message": "COMMIT;", "type": "default"}, + { + "category": "query", + "data": {}, + "message": "COMMIT;", + "type": "default", + }, ] @@ -367,14 +385,14 @@ async def test_prepared_stmt(sentry_init, capture_events) -> None: stmt = await conn.prepare("SELECT * FROM users WHERE name = $1") - print(await stmt.fetchval("Bob")) - print(await stmt.fetchval("Alice")) + await stmt.fetchval("Bob") + await stmt.fetchval("Alice") await conn.close() capture_message("hi") - (event,) = events + event = events[-1] for crumb in event["breadcrumbs"]["values"]: del crumb["timestamp"] @@ -426,7 +444,7 @@ async def test_connection_pool(sentry_init, capture_events) -> None: capture_message("hi") - (event,) = events + event = events[-1] for crumb in event["breadcrumbs"]["values"]: del crumb["timestamp"] @@ -471,7 +489,7 @@ async def test_connection_pool(sentry_init, capture_events) -> None: async def test_query_source_disabled(sentry_init, capture_events): sentry_options = { "integrations": [AsyncPGIntegration()], - "enable_tracing": True, + "traces_sample_rate": 1.0, "enable_db_query_source": False, "db_query_source_threshold_ms": 0, } @@ -480,7 +498,7 @@ async def test_query_source_disabled(sentry_init, capture_events): events = capture_events() - with start_transaction(name="test_transaction", sampled=True): + with start_span(name="test_span"): conn: Connection = await connect(PG_CONNECTION_URI) await conn.execute( @@ -509,7 +527,7 @@ async def test_query_source_enabled( ): sentry_options = { "integrations": 
[AsyncPGIntegration()], - "enable_tracing": True, + "traces_sample_rate": 1.0, "db_query_source_threshold_ms": 0, } if enable_db_query_source is not None: @@ -519,7 +537,7 @@ async def test_query_source_enabled( events = capture_events() - with start_transaction(name="test_transaction", sampled=True): + with start_span(name="test_span"): conn: Connection = await connect(PG_CONNECTION_URI) await conn.execute( @@ -545,14 +563,14 @@ async def test_query_source_enabled( async def test_query_source(sentry_init, capture_events): sentry_init( integrations=[AsyncPGIntegration()], - enable_tracing=True, + traces_sample_rate=1.0, enable_db_query_source=True, db_query_source_threshold_ms=0, ) events = capture_events() - with start_transaction(name="test_transaction", sampled=True): + with start_span(name="test_span"): conn: Connection = await connect(PG_CONNECTION_URI) await conn.execute( @@ -595,7 +613,7 @@ async def test_query_source_with_module_in_search_path(sentry_init, capture_even """ sentry_init( integrations=[AsyncPGIntegration()], - enable_tracing=True, + traces_sample_rate=1.0, enable_db_query_source=True, db_query_source_threshold_ms=0, ) @@ -604,7 +622,7 @@ async def test_query_source_with_module_in_search_path(sentry_init, capture_even from asyncpg_helpers.helpers import execute_query_in_connection - with start_transaction(name="test_transaction", sampled=True): + with start_span(name="test_span"): conn: Connection = await connect(PG_CONNECTION_URI) await execute_query_in_connection( @@ -641,31 +659,33 @@ async def test_query_source_with_module_in_search_path(sentry_init, capture_even async def test_no_query_source_if_duration_too_short(sentry_init, capture_events): sentry_init( integrations=[AsyncPGIntegration()], - enable_tracing=True, + traces_sample_rate=1.0, enable_db_query_source=True, db_query_source_threshold_ms=100, ) events = capture_events() - with start_transaction(name="test_transaction", sampled=True): + with start_span(name="test_span"): conn: Connection = await connect(PG_CONNECTION_URI) @contextmanager def fake_record_sql_queries(*args, **kwargs): - with record_sql_queries(*args, **kwargs) as span: - pass - span.start_timestamp = datetime.datetime(2024, 1, 1, microsecond=0) - span.timestamp = datetime.datetime(2024, 1, 1, microsecond=99999) - yield span + with freeze_time(datetime.datetime(2024, 1, 1, microsecond=99999)): + with record_sql_queries(*args, **kwargs) as span: + yield span with mock.patch( - "sentry_sdk.integrations.asyncpg.record_sql_queries", - fake_record_sql_queries, + "sentry_sdk.tracing.Span.start_timestamp", + datetime.datetime(2024, 1, 1, microsecond=0, tzinfo=datetime.timezone.utc), ): - await conn.execute( - "INSERT INTO users(name, password, dob) VALUES ('Alice', 'secret', '1990-12-25')", - ) + with mock.patch( + "sentry_sdk.integrations.asyncpg.record_sql_queries", + fake_record_sql_queries, + ): + await conn.execute( + "INSERT INTO users(name, password, dob) VALUES ('Alice', 'secret', '1990-12-25')", + ) await conn.close() @@ -686,31 +706,33 @@ def fake_record_sql_queries(*args, **kwargs): async def test_query_source_if_duration_over_threshold(sentry_init, capture_events): sentry_init( integrations=[AsyncPGIntegration()], - enable_tracing=True, + traces_sample_rate=1.0, enable_db_query_source=True, db_query_source_threshold_ms=100, ) events = capture_events() - with start_transaction(name="test_transaction", sampled=True): + with start_span(name="test_span"): conn: Connection = await connect(PG_CONNECTION_URI) @contextmanager def 
fake_record_sql_queries(*args, **kwargs): - with record_sql_queries(*args, **kwargs) as span: - pass - span.start_timestamp = datetime.datetime(2024, 1, 1, microsecond=0) - span.timestamp = datetime.datetime(2024, 1, 1, microsecond=100001) - yield span + with freeze_time(datetime.datetime(2024, 1, 1, microsecond=100001)): + with record_sql_queries(*args, **kwargs) as span: + yield span with mock.patch( - "sentry_sdk.integrations.asyncpg.record_sql_queries", - fake_record_sql_queries, + "sentry_sdk.tracing.Span.start_timestamp", + datetime.datetime(2024, 1, 1, microsecond=0, tzinfo=datetime.timezone.utc), ): - await conn.execute( - "INSERT INTO users(name, password, dob) VALUES ('Alice', 'secret', '1990-12-25')", - ) + with mock.patch( + "sentry_sdk.integrations.asyncpg.record_sql_queries", + fake_record_sql_queries, + ): + await conn.execute( + "INSERT INTO users(name, password, dob) VALUES ('Alice', 'secret', '1990-12-25')", + ) await conn.close() @@ -753,7 +775,7 @@ async def test_span_origin(sentry_init, capture_events): events = capture_events() - with start_transaction(name="test_transaction"): + with start_span(name="test_span"): conn: Connection = await connect(PG_CONNECTION_URI) await conn.execute("SELECT 1") diff --git a/tests/integrations/aws_lambda/lambda_functions_with_embedded_sdk/TracesSampler/index.py b/tests/integrations/aws_lambda/lambda_functions_with_embedded_sdk/TracesSampler/index.py index ce797faf71..bc2693d9b5 100644 --- a/tests/integrations/aws_lambda/lambda_functions_with_embedded_sdk/TracesSampler/index.py +++ b/tests/integrations/aws_lambda/lambda_functions_with_embedded_sdk/TracesSampler/index.py @@ -4,26 +4,14 @@ from sentry_sdk.integrations.aws_lambda import AwsLambdaIntegration # Global variables to store sampling context for verification -sampling_context_data = { - "aws_event_present": False, - "aws_context_present": False, - "event_data": None, -} +sampling_context_data = None def trace_sampler(sampling_context): # Store the sampling context for verification global sampling_context_data + sampling_context_data = sampling_context - # Check if aws_event and aws_context are in the sampling_context - if "aws_event" in sampling_context: - sampling_context_data["aws_event_present"] = True - sampling_context_data["event_data"] = sampling_context["aws_event"] - - if "aws_context" in sampling_context: - sampling_context_data["aws_context_present"] = True - - print("Sampling context data:", sampling_context_data) return 1.0 # Always sample diff --git a/tests/integrations/aws_lambda/test_aws_lambda.py b/tests/integrations/aws_lambda/test_aws_lambda.py index 85da7e0b14..da80832833 100644 --- a/tests/integrations/aws_lambda/test_aws_lambda.py +++ b/tests/integrations/aws_lambda/test_aws_lambda.py @@ -67,7 +67,7 @@ def test_environment(): try: # Wait for SAM to be ready - LocalLambdaStack.wait_for_stack() + LocalLambdaStack.wait_for_stack(log_file=debug_log_file) def before_test(): server.clear_envelopes() @@ -137,12 +137,12 @@ def test_basic_no_exception(lambda_client, test_environment): } assert transaction_event["contexts"]["trace"] == { "op": "function.aws", - "description": mock.ANY, "span_id": mock.ANY, "parent_span_id": mock.ANY, "trace_id": mock.ANY, "origin": "auto.function.aws_lambda", "data": mock.ANY, + "status": "ok", } @@ -178,7 +178,6 @@ def test_basic_exception(lambda_client, test_environment): } assert error_event["contexts"]["trace"] == { "op": "function.aws", - "description": mock.ANY, "span_id": mock.ANY, "parent_span_id": mock.ANY, "trace_id": 
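The two duration tests above no longer mutate span timestamps after the fact; instead they pin both ends of the measurement, patching `Span.start_timestamp` and freezing the clock with `freezegun` while the span is recorded, so the measured query duration is deterministic relative to the 100ms threshold. A standalone sketch of the timing trick (hypothetical names, not the tests' exact fixtures):

```python
import datetime

from freezegun import freeze_time

start = datetime.datetime(2024, 1, 1, microsecond=0)

with freeze_time(datetime.datetime(2024, 1, 1, microsecond=99999)):
    end = datetime.datetime.now()  # the frozen clock, not wall time

elapsed_ms = (end - start).total_seconds() * 1000
assert elapsed_ms < 100  # stays under a 100ms query-source threshold
```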
mock.ANY, @@ -217,9 +216,7 @@ def test_timeout_error(lambda_client, test_environment): (exception,) = error_event["exception"]["values"] assert not exception["mechanism"]["handled"] assert exception["type"] == "ServerlessTimeoutWarning" - assert exception["value"].startswith( - "WARNING : Function is expected to get timed out. Configured timeout duration =" - ) + assert exception["value"] == "WARNING: Function is about to time out." assert exception["mechanism"]["type"] == "threading" @@ -314,9 +311,7 @@ def test_non_dict_event( "headers": {"Host": "x1.io", "X-Forwarded-Proto": "https"}, "method": "GET", "url": "https://x1.io/1", - "query_string": { - "done": "f", - }, + "query_string": "done=f", } else: request_data = {"url": "awslambda:///BasicException"} @@ -325,10 +320,10 @@ def test_non_dict_event( assert transaction_event["request"] == request_data if batch_size > 1: - assert error_event["tags"]["batch_size"] == batch_size - assert error_event["tags"]["batch_request"] is True - assert transaction_event["tags"]["batch_size"] == batch_size - assert transaction_event["tags"]["batch_request"] is True + assert error_event["tags"]["batch_size"] == str(batch_size) + assert error_event["tags"]["batch_request"] == "True" + assert transaction_event["tags"]["batch_size"] == str(batch_size) + assert transaction_event["tags"]["batch_request"] == "True" def test_request_data(lambda_client, test_environment): @@ -343,7 +338,8 @@ def test_request_data(lambda_client, test_environment): "X-Forwarded-Proto": "https" }, "queryStringParameters": { - "bonkers": "true" + "bonkers": "true", + "wild": "false" }, "pathParameters": null, "stageVariables": null, @@ -373,7 +369,7 @@ def test_request_data(lambda_client, test_environment): "X-Forwarded-Proto": "https", }, "method": "GET", - "query_string": {"bonkers": "true"}, + "query_string": "bonkers=true&wild=false", "url": "https://iwsz2c7uwi.execute-api.us-east-1.amazonaws.com/asd", } @@ -457,7 +453,19 @@ def test_traces_sampler_has_correct_sampling_context(lambda_client, test_environ Test that aws_event and aws_context are passed in the custom_sampling_context when using the AWS Lambda integration. 
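As the updated assertions in this test show, `traces_sampler` no longer receives the raw `aws_event`/`aws_context` objects; it gets a flat dict of OTel-style attributes (`http.request.method`, `url.full`, `faas.name`, and so on). A sampler written against the new shape matches on those keys, for example (hypothetical filtering policy):

```python
def traces_sampler(sampling_context):
    # Keys follow OTel semantic conventions; the exact set depends on
    # the integration that started the transaction.
    if (
        sampling_context.get("http.request.method") == "GET"
        and sampling_context.get("url.path") == "/health"
    ):
        return 0.0  # never trace health checks
    return 1.0
```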
""" - test_payload = {"test_key": "test_value"} + test_payload = { + "test_key": "test_value", + "httpMethod": "GET", + "queryStringParameters": { + "test_query_param": "test_query_value", + }, + "path": "/test", + "headers": { + "X-Forwarded-Proto": "https", + "Host": "example.com", + "X-Bla": "blabla", + }, + } response = lambda_client.invoke( FunctionName="TracesSampler", Payload=json.dumps(test_payload), @@ -466,9 +474,28 @@ def test_traces_sampler_has_correct_sampling_context(lambda_client, test_environ sampling_context_data = json.loads(response_payload["body"])[ "sampling_context_data" ] - assert sampling_context_data.get("aws_event_present") is True - assert sampling_context_data.get("aws_context_present") is True - assert sampling_context_data.get("event_data", {}).get("test_key") == "test_value" + + assert sampling_context_data == { + "transaction_context": { + "name": "TracesSampler", + "op": "function.aws", + "source": "component", + }, + "http.request.method": "GET", + "url.query": "test_query_param=test_query_value", + "url.path": "/test", + "url.full": "https://example.com/test?test_query_param=test_query_value", + "network.protocol.name": "https", + "server.address": "example.com", + "faas.name": "TracesSampler", + "http.request.header.x-forwarded-proto": "https", + "http.request.header.host": "example.com", + "http.request.header.x-bla": "blabla", + "sentry.op": "function.aws", + "sentry.source": "component", + "parent_sampled": None, + "cloud.provider": "aws", + } @pytest.mark.parametrize( diff --git a/tests/integrations/aws_lambda/utils.py b/tests/integrations/aws_lambda/utils.py index d20c9352e7..3d590390ae 100644 --- a/tests/integrations/aws_lambda/utils.py +++ b/tests/integrations/aws_lambda/utils.py @@ -211,7 +211,7 @@ def __init__(self, scope: Construct, construct_id: str, **kwargs) -> None: ) @classmethod - def wait_for_stack(cls, timeout=60, port=SAM_PORT): + def wait_for_stack(cls, timeout=60, port=SAM_PORT, log_file=None): """ Wait for SAM to be ready, with timeout. """ @@ -219,8 +219,8 @@ def wait_for_stack(cls, timeout=60, port=SAM_PORT): while True: if time.time() - start_time > timeout: raise TimeoutError( - "AWS SAM failed to start within %s seconds. (Maybe Docker is not running?)" - % timeout + "AWS SAM failed to start within %s seconds. (Maybe Docker is not running, or new docker images could not be built in time?) 
Check the log for more details: %s" + % (timeout, log_file) ) try: diff --git a/tests/integrations/boto3/test_s3.py b/tests/integrations/boto3/test_s3.py index 97a1543b0f..71dc5ccc07 100644 --- a/tests/integrations/boto3/test_s3.py +++ b/tests/integrations/boto3/test_s3.py @@ -21,7 +21,7 @@ def test_basic(sentry_init, capture_events): events = capture_events() s3 = session.resource("s3") - with sentry_sdk.start_transaction() as transaction, MockResponse( + with sentry_sdk.start_span() as transaction, MockResponse( s3.meta.client, 200, {}, read_fixture("s3_list.xml") ): bucket = s3.Bucket("bucket") @@ -39,12 +39,43 @@ def test_basic(sentry_init, capture_events): assert span["description"] == "aws.s3.ListObjects" +def test_breadcrumb(sentry_init, capture_events): + sentry_init(traces_sample_rate=1.0, integrations=[Boto3Integration()]) + events = capture_events() + + try: + s3 = session.resource("s3") + with sentry_sdk.start_span(), MockResponse( + s3.meta.client, 200, {}, read_fixture("s3_list.xml") + ): + bucket = s3.Bucket("bucket") + # read bucket (this makes http request) + [obj for obj in bucket.objects.all()] + 1 / 0 + except Exception as e: + sentry_sdk.capture_exception(e) + + (_, event) = events + crumb = event["breadcrumbs"]["values"][0] + assert crumb == { + "type": "http", + "category": "httplib", + "data": { + "http.method": "GET", + "aws.request.url": "https://bucket.s3.amazonaws.com/", + "http.query": "encoding-type=url", + "http.fragment": "", + }, + "timestamp": mock.ANY, + } + + def test_streaming(sentry_init, capture_events): sentry_init(traces_sample_rate=1.0, integrations=[Boto3Integration()]) events = capture_events() s3 = session.resource("s3") - with sentry_sdk.start_transaction() as transaction, MockResponse( + with sentry_sdk.start_span() as transaction, MockResponse( s3.meta.client, 200, {}, b"hello" ): obj = s3.Bucket("bucket").Object("foo.pdf") @@ -82,7 +113,7 @@ def test_streaming_close(sentry_init, capture_events): events = capture_events() s3 = session.resource("s3") - with sentry_sdk.start_transaction() as transaction, MockResponse( + with sentry_sdk.start_span() as transaction, MockResponse( s3.meta.client, 200, {}, b"hello" ): obj = s3.Bucket("bucket").Object("foo.pdf") @@ -111,7 +142,7 @@ def test_omit_url_data_if_parsing_fails(sentry_init, capture_events): "sentry_sdk.integrations.boto3.parse_url", side_effect=ValueError, ): - with sentry_sdk.start_transaction() as transaction, MockResponse( + with sentry_sdk.start_span() as transaction, MockResponse( s3.meta.client, 200, {}, read_fixture("s3_list.xml") ): bucket = s3.Bucket("bucket") @@ -139,7 +170,7 @@ def test_span_origin(sentry_init, capture_events): events = capture_events() s3 = session.resource("s3") - with sentry_sdk.start_transaction(), MockResponse( + with sentry_sdk.start_span(), MockResponse( s3.meta.client, 200, {}, read_fixture("s3_list.xml") ): bucket = s3.Bucket("bucket") diff --git a/tests/integrations/celery/test_celery.py b/tests/integrations/celery/test_celery.py index ce2e693143..5f36152af0 100644 --- a/tests/integrations/celery/test_celery.py +++ b/tests/integrations/celery/test_celery.py @@ -5,9 +5,11 @@ import pytest from celery import Celery, VERSION from celery.bin import worker +from celery.app.task import Task +from opentelemetry import trace as otel_trace, context import sentry_sdk -from sentry_sdk import start_transaction, get_current_span +from sentry_sdk import get_current_span from sentry_sdk.integrations.celery import ( CeleryIntegration, _wrap_task_run, @@ -126,14 
+128,14 @@ def dummy_task(x, y): foo = 42 # noqa return x / y - with start_transaction(op="unit test transaction") as transaction: + with sentry_sdk.start_span(op="unit test transaction") as root_span: celery_invocation(dummy_task, 1, 2) _, expected_context = celery_invocation(dummy_task, 1, 0) (_, error_event, _, _) = events - assert error_event["contexts"]["trace"]["trace_id"] == transaction.trace_id - assert error_event["contexts"]["trace"]["span_id"] != transaction.span_id + assert error_event["contexts"]["trace"]["trace_id"] == root_span.trace_id + assert error_event["contexts"]["trace"]["span_id"] != root_span.span_id assert error_event["transaction"] == "dummy_task" assert "celery_task_id" in error_event["tags"] assert error_event["extra"]["celery-job"] == dict( @@ -190,17 +192,14 @@ def test_transaction_events(capture_events, init_celery, celery_invocation, task def dummy_task(x, y): return x / y - # XXX: For some reason the first call does not get instrumented properly. - celery_invocation(dummy_task, 1, 1) - events = capture_events() - with start_transaction(name="submission") as transaction: + with sentry_sdk.start_span(name="submission") as root_span: celery_invocation(dummy_task, 1, 0 if task_fails else 1) if task_fails: error_event = events.pop(0) - assert error_event["contexts"]["trace"]["trace_id"] == transaction.trace_id + assert error_event["contexts"]["trace"]["trace_id"] == root_span.trace_id assert error_event["exception"]["values"][0]["type"] == "ZeroDivisionError" execution_event, submission_event = events @@ -211,8 +210,8 @@ def dummy_task(x, y): assert submission_event["transaction_info"] == {"source": "custom"} assert execution_event["type"] == submission_event["type"] == "transaction" - assert execution_event["contexts"]["trace"]["trace_id"] == transaction.trace_id - assert submission_event["contexts"]["trace"]["trace_id"] == transaction.trace_id + assert execution_event["contexts"]["trace"]["trace_id"] == root_span.trace_id + assert submission_event["contexts"]["trace"]["trace_id"] == root_span.trace_id if task_fails: assert execution_event["contexts"]["trace"]["status"] == "internal_error" @@ -220,28 +219,32 @@ def dummy_task(x, y): assert execution_event["contexts"]["trace"]["status"] == "ok" assert len(execution_event["spans"]) == 1 - assert ( - execution_event["spans"][0].items() - >= { - "trace_id": str(transaction.trace_id), - "same_process_as_parent": True, + assert execution_event["spans"][0] == ApproxDict( + { + "trace_id": str(root_span.trace_id), "op": "queue.process", "description": "dummy_task", - "data": ApproxDict(), - }.items() + } ) assert submission_event["spans"] == [ { - "data": ApproxDict(), + "data": { + "sentry.name": "dummy_task", + "sentry.op": "queue.submit.celery", + "sentry.origin": "auto.queue.celery", + "sentry.source": "custom", + "thread.id": mock.ANY, + "thread.name": mock.ANY, + }, "description": "dummy_task", "op": "queue.submit.celery", "origin": "auto.queue.celery", "parent_span_id": submission_event["contexts"]["trace"]["span_id"], - "same_process_as_parent": True, "span_id": submission_event["spans"][0]["span_id"], "start_timestamp": submission_event["spans"][0]["start_timestamp"], "timestamp": submission_event["spans"][0]["timestamp"], - "trace_id": str(transaction.trace_id), + "trace_id": str(root_span.trace_id), + "status": "ok", } ] @@ -284,11 +287,11 @@ def test_simple_no_propagation(capture_events, init_celery): def dummy_task(): 1 / 0 - with start_transaction() as transaction: + with 
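The Celery sampling context is flattened the same way: instead of a nested `celery_job` dict, task metadata arrives as string attributes (`celery.job.task`, `celery.job.args.0`, `celery.job.kwargs.<name>`, ...), which is what the rewritten assertions above iterate over. A sampler keyed on the task name might look like this (hypothetical policy):

```python
def traces_sampler(sampling_context):
    # Positional args are stringified under indexed keys:
    #   celery.job.args.0, celery.job.args.1, ...
    if sampling_context.get("celery.job.task") == "dog_walk":
        return 0.0  # drop traces for this noisy task
    return 1.0
```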
sentry_sdk.start_span(name="task") as root_span: dummy_task.delay() (event,) = events - assert event["contexts"]["trace"]["trace_id"] != transaction.trace_id + assert event["contexts"]["trace"]["trace_id"] != root_span.trace_id assert event["transaction"] == "dummy_task" (exception,) = event["exception"]["values"] assert exception["type"] == "ZeroDivisionError" @@ -359,7 +362,7 @@ def dummy_task(self): runs.append(1) 1 / 0 - with start_transaction(name="submit_celery"): + with sentry_sdk.start_span(name="submit_celery"): # Curious: Cannot use delay() here or py2.7-celery-4.2 crashes res = dummy_task.apply_async() @@ -439,7 +442,7 @@ def dummy_task(self, x, y): def test_traces_sampler_gets_task_info_in_sampling_context( - init_celery, celery_invocation, DictionaryContaining # noqa:N803 + init_celery, celery_invocation ): traces_sampler = mock.Mock() celery = init_celery(traces_sampler=traces_sampler) @@ -454,11 +457,12 @@ def walk_dogs(x, y): walk_dogs, [["Maisey", "Charlie", "Bodhi", "Cory"], "Dog park round trip"], 1 ) - traces_sampler.assert_any_call( - # depending on the iteration of celery_invocation, the data might be - # passed as args or as kwargs, so make this generic - DictionaryContaining({"celery_job": dict(task="dog_walk", **args_kwargs)}) - ) + sampling_context = traces_sampler.call_args_list[0][0][0] + assert sampling_context["celery.job.task"] == "dog_walk" + for i, arg in enumerate(args_kwargs["args"]): + assert sampling_context[f"celery.job.args.{i}"] == str(arg) + for kwarg, value in args_kwargs["kwargs"].items(): + assert sampling_context[f"celery.job.kwargs.{kwarg}"] == str(value) def test_abstract_task(capture_events, celery, celery_invocation): @@ -477,7 +481,7 @@ def __call__(self, *args, **kwargs): def dummy_task(x, y): return x / y - with start_transaction(): + with sentry_sdk.start_span(name="celery"): celery_invocation(dummy_task, 1, 0) assert not events @@ -518,9 +522,8 @@ def test_baggage_propagation(init_celery): def dummy_task(self, x, y): return _get_headers(self) - # patch random.uniform to return a predictable sample_rand value with mock.patch("sentry_sdk.tracing_utils.Random.uniform", return_value=0.5): - with start_transaction() as transaction: + with sentry_sdk.start_span(name="task") as root_span: result = dummy_task.apply_async( args=(1, 0), headers={"baggage": "custom=value"}, @@ -529,7 +532,8 @@ def dummy_task(self, x, y): assert sorted(result["baggage"].split(",")) == sorted( [ "sentry-release=abcdef", - "sentry-trace_id={}".format(transaction.trace_id), + "sentry-trace_id={}".format(root_span.trace_id), + "sentry-transaction=task", "sentry-environment=production", "sentry-sample_rand=0.500000", "sentry-sample_rate=1.0", @@ -548,26 +552,42 @@ def test_sentry_propagate_traces_override(init_celery): propagate_traces=True, traces_sample_rate=1.0, release="abcdef" ) + # Since we're applying the task inline eagerly, + # we need to cleanup the otel context for this test. + # and since we patch build_tracer, we need to do this before that runs... 
+ # TODO: the right way is to not test this inline + original_apply = Task.apply + + def cleaned_apply(*args, **kwargs): + token = context.attach(otel_trace.set_span_in_context(otel_trace.INVALID_SPAN)) + rv = original_apply(*args, **kwargs) + context.detach(token) + return rv + + Task.apply = cleaned_apply + @celery.task(name="dummy_task", bind=True) def dummy_task(self, message): trace_id = get_current_span().trace_id return trace_id - with start_transaction() as transaction: - transaction_trace_id = transaction.trace_id + with sentry_sdk.start_span(name="task") as root_span: + root_span_trace_id = root_span.trace_id # should propagate trace - task_transaction_id = dummy_task.apply_async( + task_trace_id = dummy_task.apply_async( args=("some message",), ).get() - assert transaction_trace_id == task_transaction_id + assert root_span_trace_id == task_trace_id, "Trace should be propagated" # should NOT propagate trace (overrides `propagate_traces` parameter in integration constructor) - task_transaction_id = dummy_task.apply_async( + task_trace_id = dummy_task.apply_async( args=("another message",), headers={"sentry-propagate-traces": False}, ).get() - assert transaction_trace_id != task_transaction_id + assert root_span_trace_id != task_trace_id, "Trace should NOT be propagated" + + Task.apply = original_apply def test_apply_async_manually_span(sentry_init): @@ -604,7 +624,7 @@ def example_task(): def test_messaging_destination_name_default_exchange( mock_request, routing_key, init_celery, capture_events ): - celery_app = init_celery(enable_tracing=True) + celery_app = init_celery(traces_sample_rate=1.0) events = capture_events() mock_request.delivery_info = {"routing_key": routing_key, "exchange": ""} @@ -628,7 +648,7 @@ def test_messaging_destination_name_nondefault_exchange( that the routing key is the queue name. Other exchanges may not guarantee this behavior. """ - celery_app = init_celery(enable_tracing=True) + celery_app = init_celery(traces_sample_rate=1.0) events = capture_events() mock_request.delivery_info = {"routing_key": "celery", "exchange": "custom"} @@ -643,7 +663,7 @@ def task(): ... def test_messaging_id(init_celery, capture_events): - celery = init_celery(enable_tracing=True) + celery = init_celery(traces_sample_rate=1.0) events = capture_events() @celery.task @@ -657,7 +677,7 @@ def example_task(): ... def test_retry_count_zero(init_celery, capture_events): - celery = init_celery(enable_tracing=True) + celery = init_celery(traces_sample_rate=1.0) events = capture_events() @celery.task() @@ -674,7 +694,7 @@ def task(): ... def test_retry_count_nonzero(mock_request, init_celery, capture_events): mock_request.retries = 3 - celery = init_celery(enable_tracing=True) + celery = init_celery(traces_sample_rate=1.0) events = capture_events() @celery.task() @@ -689,7 +709,7 @@ def task(): ... @pytest.mark.parametrize("system", ("redis", "amqp")) def test_messaging_system(system, init_celery, capture_events): - celery = init_celery(enable_tracing=True) + celery = init_celery(traces_sample_rate=1.0) events = capture_events() # Does not need to be a real URL, since we use always eager @@ -714,14 +734,14 @@ def publish(*args, **kwargs): monkeypatch.setattr(kombu.messaging.Producer, "_publish", publish) - sentry_init(integrations=[CeleryIntegration()], enable_tracing=True) + sentry_init(integrations=[CeleryIntegration()], traces_sample_rate=1.0) celery = Celery(__name__, broker=f"{system}://example.com") # noqa: E231 events = capture_events() @celery.task() def task(): ... 
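The mechanical change running through all of these files: `start_transaction(...)` becomes `start_span(name=...)`, and the outermost span now acts as the root of the trace, with child spans nesting exactly as before. In isolation:

```python
import sentry_sdk

# Old (removed): with sentry_sdk.start_transaction(name="task") as transaction:
with sentry_sdk.start_span(name="task") as root_span:
    with sentry_sdk.start_span(op="queue.submit.celery", name="dummy_task"):
        ...  # work happens here
    trace_id = root_span.trace_id  # same accessors as before
```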
- with start_transaction(): + with sentry_sdk.start_span(name="task"): task.apply_async() (event,) = events @@ -752,7 +772,7 @@ def task(): ... def tests_span_origin_consumer(init_celery, capture_events): - celery = init_celery(enable_tracing=True) + celery = init_celery(traces_sample_rate=1.0) celery.conf.broker_url = "redis://example.com" # noqa: E231 events = capture_events() @@ -776,7 +796,7 @@ def publish(*args, **kwargs): monkeypatch.setattr(kombu.messaging.Producer, "_publish", publish) - sentry_init(integrations=[CeleryIntegration()], enable_tracing=True) + sentry_init(integrations=[CeleryIntegration()], traces_sample_rate=1.0) celery = Celery(__name__, broker="redis://example.com") # noqa: E231 events = capture_events() @@ -784,7 +804,7 @@ def publish(*args, **kwargs): @celery.task() def task(): ... - with start_transaction(name="custom_transaction"): + with sentry_sdk.start_span(name="custom_transaction"): task.apply_async() (event,) = events @@ -805,12 +825,12 @@ def test_send_task_wrapped( capture_events, reset_integrations, ): - sentry_init(integrations=[CeleryIntegration()], enable_tracing=True) + sentry_init(integrations=[CeleryIntegration()], traces_sample_rate=1.0) celery = Celery(__name__, broker="redis://example.com") # noqa: E231 events = capture_events() - with sentry_sdk.start_transaction(name="custom_transaction"): + with sentry_sdk.start_span(name="custom_transaction"): celery.send_task("very_creative_task_name", args=(1, 2), kwargs={"foo": "bar"}) (call,) = patched_send_task.call_args_list # We should have exactly one call diff --git a/tests/integrations/celery/test_update_celery_task_headers.py b/tests/integrations/celery/test_update_celery_task_headers.py index 705c00de58..5b76bee076 100644 --- a/tests/integrations/celery/test_update_celery_task_headers.py +++ b/tests/integrations/celery/test_update_celery_task_headers.py @@ -7,6 +7,7 @@ from sentry_sdk.integrations.celery import _update_celery_task_headers import sentry_sdk from sentry_sdk.tracing_utils import Baggage +from tests.conftest import SortedBaggage BAGGAGE_VALUE = ( @@ -71,11 +72,11 @@ def test_monitor_beat_tasks_with_headers(monitor_beat_tasks): def test_span_with_transaction(sentry_init): - sentry_init(enable_tracing=True) + sentry_init(traces_sample_rate=1.0) headers = {} monitor_beat_tasks = False - with sentry_sdk.start_transaction(name="test_transaction") as transaction: + with sentry_sdk.start_span(name="test_transaction") as transaction: with sentry_sdk.start_span(op="test_span") as span: outgoing_headers = _update_celery_task_headers( headers, span, monitor_beat_tasks @@ -83,21 +84,22 @@ def test_span_with_transaction(sentry_init): assert outgoing_headers["sentry-trace"] == span.to_traceparent() assert outgoing_headers["headers"]["sentry-trace"] == span.to_traceparent() - assert outgoing_headers["baggage"] == transaction.get_baggage().serialize() - assert ( - outgoing_headers["headers"]["baggage"] - == transaction.get_baggage().serialize() + assert outgoing_headers["baggage"] == SortedBaggage( + transaction.get_baggage().serialize() + ) + assert outgoing_headers["headers"]["baggage"] == SortedBaggage( + transaction.get_baggage().serialize() ) def test_span_with_transaction_custom_headers(sentry_init): - sentry_init(enable_tracing=True) + sentry_init(traces_sample_rate=1.0) headers = { "baggage": BAGGAGE_VALUE, "sentry-trace": SENTRY_TRACE_VALUE, } - with sentry_sdk.start_transaction(name="test_transaction") as transaction: + with sentry_sdk.start_span(name="test_transaction") as transaction: 
with sentry_sdk.start_span(op="test_span") as span: outgoing_headers = _update_celery_task_headers(headers, span, False) @@ -117,11 +119,11 @@ def test_span_with_transaction_custom_headers(sentry_init): if x is not None and x != "" ] ) - assert outgoing_headers["baggage"] == combined_baggage.serialize( - include_third_party=True + assert outgoing_headers["baggage"] == SortedBaggage( + combined_baggage.serialize(include_third_party=True) ) - assert outgoing_headers["headers"]["baggage"] == combined_baggage.serialize( - include_third_party=True + assert outgoing_headers["headers"]["baggage"] == SortedBaggage( + combined_baggage.serialize(include_third_party=True) ) @@ -190,39 +192,3 @@ def test_celery_trace_propagation_traces_sample_rate( else: assert "sentry-monitor-start-timestamp-s" not in outgoing_headers assert "sentry-monitor-start-timestamp-s" not in outgoing_headers["headers"] - - -@pytest.mark.parametrize( - "enable_tracing,monitor_beat_tasks", - list(itertools.product([None, True, False], [True, False])), -) -def test_celery_trace_propagation_enable_tracing( - sentry_init, enable_tracing, monitor_beat_tasks -): - """ - The celery integration does not check the traces_sample_rate. - By default traces_sample_rate is None which means "do not propagate traces". - But the celery integration does not check this value. - The Celery integration has its own mechanism to propagate traces: - https://docs.sentry.io/platforms/python/integrations/celery/#distributed-traces - """ - sentry_init(enable_tracing=enable_tracing) - - headers = {} - span = None - - scope = sentry_sdk.get_isolation_scope() - - outgoing_headers = _update_celery_task_headers(headers, span, monitor_beat_tasks) - - assert outgoing_headers["sentry-trace"] == scope.get_traceparent() - assert outgoing_headers["headers"]["sentry-trace"] == scope.get_traceparent() - assert outgoing_headers["baggage"] == scope.get_baggage().serialize() - assert outgoing_headers["headers"]["baggage"] == scope.get_baggage().serialize() - - if monitor_beat_tasks: - assert "sentry-monitor-start-timestamp-s" in outgoing_headers - assert "sentry-monitor-start-timestamp-s" in outgoing_headers["headers"] - else: - assert "sentry-monitor-start-timestamp-s" not in outgoing_headers - assert "sentry-monitor-start-timestamp-s" not in outgoing_headers["headers"] diff --git a/tests/integrations/chalice/test_chalice.py b/tests/integrations/chalice/test_chalice.py index fbd4be4e59..27dcd431ce 100644 --- a/tests/integrations/chalice/test_chalice.py +++ b/tests/integrations/chalice/test_chalice.py @@ -10,11 +10,10 @@ from pytest_chalice.handlers import RequestHandler -def _generate_lambda_context(self): +def _generate_lambda_context(self) -> LambdaContext: # Monkeypatch of the function _generate_lambda_context # from the class LocalGateway # for mock the timeout - # type: () -> LambdaContext if self._config.lambda_timeout is None: timeout = 10 * 1000 else: diff --git a/tests/integrations/clickhouse_driver/test_clickhouse_driver.py b/tests/integrations/clickhouse_driver/test_clickhouse_driver.py index 0675ad9ff5..47131f2d3d 100644 --- a/tests/integrations/clickhouse_driver/test_clickhouse_driver.py +++ b/tests/integrations/clickhouse_driver/test_clickhouse_driver.py @@ -1,14 +1,14 @@ """ Tests need a local clickhouse instance running, this can best be done using ```sh -docker run -d -p 18123:8123 -p9000:9000 --name clickhouse-test --ulimit nofile=262144:262144 --rm clickhouse/clickhouse-server +docker run -d -e CLICKHOUSE_SKIP_USER_SETUP=1 -p 8123:8123 -p 9000:9000 
--name clickhouse-test --ulimit nofile=262144:262144 --rm clickhouse ``` """ import clickhouse_driver from clickhouse_driver import Client, connect -from sentry_sdk import start_transaction, capture_message +from sentry_sdk import start_span, capture_message from sentry_sdk.integrations.clickhouse_driver import ClickhouseDriverIntegration from tests.conftest import ApproxDict @@ -233,7 +233,7 @@ def test_clickhouse_client_spans( transaction_trace_id = None transaction_span_id = None - with start_transaction(name="test_clickhouse_transaction") as transaction: + with start_span(name="test_clickhouse_transaction") as transaction: transaction_trace_id = transaction.trace_id transaction_span_id = transaction.span_id @@ -256,13 +256,15 @@ def test_clickhouse_client_spans( "origin": "auto.db.clickhouse_driver", "description": "DROP TABLE IF EXISTS test", "data": { + "sentry.name": "DROP TABLE IF EXISTS test", + "sentry.origin": "auto.db.clickhouse_driver", + "sentry.op": "db", "db.system": "clickhouse", "db.name": "", "db.user": "default", "server.address": "localhost", "server.port": 9000, }, - "same_process_as_parent": True, "trace_id": transaction_trace_id, "parent_span_id": transaction_span_id, }, @@ -271,13 +273,15 @@ def test_clickhouse_client_spans( "origin": "auto.db.clickhouse_driver", "description": "CREATE TABLE test (x Int32) ENGINE = Memory", "data": { + "sentry.name": "CREATE TABLE test (x Int32) ENGINE = Memory", + "sentry.origin": "auto.db.clickhouse_driver", + "sentry.op": "db", "db.system": "clickhouse", "db.name": "", "db.user": "default", "server.address": "localhost", "server.port": 9000, }, - "same_process_as_parent": True, "trace_id": transaction_trace_id, "parent_span_id": transaction_span_id, }, @@ -286,13 +290,15 @@ def test_clickhouse_client_spans( "origin": "auto.db.clickhouse_driver", "description": "INSERT INTO test (x) VALUES", "data": { + "sentry.name": "INSERT INTO test (x) VALUES", + "sentry.origin": "auto.db.clickhouse_driver", + "sentry.op": "db", "db.system": "clickhouse", "db.name": "", "db.user": "default", "server.address": "localhost", "server.port": 9000, }, - "same_process_as_parent": True, "trace_id": transaction_trace_id, "parent_span_id": transaction_span_id, }, @@ -301,13 +307,15 @@ def test_clickhouse_client_spans( "origin": "auto.db.clickhouse_driver", "description": "INSERT INTO test (x) VALUES", "data": { + "sentry.name": "INSERT INTO test (x) VALUES", + "sentry.origin": "auto.db.clickhouse_driver", + "sentry.op": "db", "db.system": "clickhouse", "db.name": "", "db.user": "default", "server.address": "localhost", "server.port": 9000, }, - "same_process_as_parent": True, "trace_id": transaction_trace_id, "parent_span_id": transaction_span_id, }, @@ -316,13 +324,15 @@ def test_clickhouse_client_spans( "origin": "auto.db.clickhouse_driver", "description": "SELECT sum(x) FROM test WHERE x > 150", "data": { + "sentry.name": "SELECT sum(x) FROM test WHERE x > 150", + "sentry.origin": "auto.db.clickhouse_driver", + "sentry.op": "db", "db.system": "clickhouse", "db.name": "", "db.user": "default", "server.address": "localhost", "server.port": 9000, }, - "same_process_as_parent": True, "trace_id": transaction_trace_id, "parent_span_id": transaction_span_id, }, @@ -338,13 +348,13 @@ def test_clickhouse_client_spans( span.pop("span_id", None) span.pop("start_timestamp", None) span.pop("timestamp", None) + span.pop("same_process_as_parent", None) + span.pop("status", None) assert event["spans"] == expected_spans -def test_clickhouse_client_spans_with_pii( - 
sentry_init, capture_events, capture_envelopes -) -> None: +def test_clickhouse_client_spans_with_pii(sentry_init, capture_events) -> None: sentry_init( integrations=[ClickhouseDriverIntegration()], _experiments={"record_sql_params": True}, @@ -356,7 +366,7 @@ def test_clickhouse_client_spans_with_pii( transaction_trace_id = None transaction_span_id = None - with start_transaction(name="test_clickhouse_transaction") as transaction: + with start_span(name="test_clickhouse_transaction") as transaction: transaction_trace_id = transaction.trace_id transaction_span_id = transaction.span_id @@ -379,14 +389,17 @@ def test_clickhouse_client_spans_with_pii( "origin": "auto.db.clickhouse_driver", "description": "DROP TABLE IF EXISTS test", "data": { + "sentry.name": "DROP TABLE IF EXISTS test", + "sentry.origin": "auto.db.clickhouse_driver", + "sentry.op": "db", "db.system": "clickhouse", "db.name": "", "db.user": "default", "server.address": "localhost", "server.port": 9000, + "db.query.text": "DROP TABLE IF EXISTS test", "db.result": [], }, - "same_process_as_parent": True, "trace_id": transaction_trace_id, "parent_span_id": transaction_span_id, }, @@ -395,14 +408,17 @@ def test_clickhouse_client_spans_with_pii( "origin": "auto.db.clickhouse_driver", "description": "CREATE TABLE test (x Int32) ENGINE = Memory", "data": { + "sentry.name": "CREATE TABLE test (x Int32) ENGINE = Memory", + "sentry.origin": "auto.db.clickhouse_driver", + "sentry.op": "db", "db.system": "clickhouse", "db.name": "", "db.user": "default", + "db.query.text": "CREATE TABLE test (x Int32) ENGINE = Memory", + "db.result": [], "server.address": "localhost", "server.port": 9000, - "db.result": [], }, - "same_process_as_parent": True, "trace_id": transaction_trace_id, "parent_span_id": transaction_span_id, }, @@ -411,14 +427,17 @@ def test_clickhouse_client_spans_with_pii( "origin": "auto.db.clickhouse_driver", "description": "INSERT INTO test (x) VALUES", "data": { + "sentry.name": "INSERT INTO test (x) VALUES", + "sentry.origin": "auto.db.clickhouse_driver", + "sentry.op": "db", "db.system": "clickhouse", "db.name": "", "db.user": "default", + "db.query.text": "INSERT INTO test (x) VALUES", + "db.params": '[{"x": 100}]', "server.address": "localhost", "server.port": 9000, - "db.params": [{"x": 100}], }, - "same_process_as_parent": True, "trace_id": transaction_trace_id, "parent_span_id": transaction_span_id, }, @@ -427,14 +446,16 @@ def test_clickhouse_client_spans_with_pii( "origin": "auto.db.clickhouse_driver", "description": "INSERT INTO test (x) VALUES", "data": { + "sentry.name": "INSERT INTO test (x) VALUES", + "sentry.origin": "auto.db.clickhouse_driver", + "sentry.op": "db", "db.system": "clickhouse", "db.name": "", "db.user": "default", + "db.query.text": "INSERT INTO test (x) VALUES", "server.address": "localhost", "server.port": 9000, - "db.params": [[170], [200]], }, - "same_process_as_parent": True, "trace_id": transaction_trace_id, "parent_span_id": transaction_span_id, }, @@ -443,15 +464,18 @@ def test_clickhouse_client_spans_with_pii( "origin": "auto.db.clickhouse_driver", "description": "SELECT sum(x) FROM test WHERE x > 150", "data": { + "sentry.name": "SELECT sum(x) FROM test WHERE x > 150", + "sentry.origin": "auto.db.clickhouse_driver", + "sentry.op": "db", "db.system": "clickhouse", "db.name": "", "db.user": "default", + "db.params": '{"minv": 150}', + "db.query.text": "SELECT sum(x) FROM test WHERE x > 150", + "db.result": "[[370]]", "server.address": "localhost", "server.port": 9000, - "db.params": 
{"minv": 150}, - "db.result": [[370]], }, - "same_process_as_parent": True, "trace_id": transaction_trace_id, "parent_span_id": transaction_span_id, }, @@ -467,6 +491,8 @@ def test_clickhouse_client_spans_with_pii( span.pop("span_id", None) span.pop("start_timestamp", None) span.pop("timestamp", None) + span.pop("same_process_as_parent", None) + span.pop("status", None) assert event["spans"] == expected_spans @@ -681,7 +707,7 @@ def test_clickhouse_dbapi_spans(sentry_init, capture_events, capture_envelopes) transaction_trace_id = None transaction_span_id = None - with start_transaction(name="test_clickhouse_transaction") as transaction: + with start_span(name="test_clickhouse_transaction") as transaction: transaction_trace_id = transaction.trace_id transaction_span_id = transaction.span_id @@ -704,13 +730,15 @@ def test_clickhouse_dbapi_spans(sentry_init, capture_events, capture_envelopes) "origin": "auto.db.clickhouse_driver", "description": "DROP TABLE IF EXISTS test", "data": { + "sentry.name": "DROP TABLE IF EXISTS test", + "sentry.origin": "auto.db.clickhouse_driver", + "sentry.op": "db", "db.system": "clickhouse", "db.name": "", "db.user": "default", "server.address": "localhost", "server.port": 9000, }, - "same_process_as_parent": True, "trace_id": transaction_trace_id, "parent_span_id": transaction_span_id, }, @@ -719,13 +747,15 @@ def test_clickhouse_dbapi_spans(sentry_init, capture_events, capture_envelopes) "origin": "auto.db.clickhouse_driver", "description": "CREATE TABLE test (x Int32) ENGINE = Memory", "data": { + "sentry.name": "CREATE TABLE test (x Int32) ENGINE = Memory", + "sentry.origin": "auto.db.clickhouse_driver", + "sentry.op": "db", "db.system": "clickhouse", "db.name": "", "db.user": "default", "server.address": "localhost", "server.port": 9000, }, - "same_process_as_parent": True, "trace_id": transaction_trace_id, "parent_span_id": transaction_span_id, }, @@ -734,13 +764,15 @@ def test_clickhouse_dbapi_spans(sentry_init, capture_events, capture_envelopes) "origin": "auto.db.clickhouse_driver", "description": "INSERT INTO test (x) VALUES", "data": { + "sentry.name": "INSERT INTO test (x) VALUES", + "sentry.origin": "auto.db.clickhouse_driver", + "sentry.op": "db", "db.system": "clickhouse", "db.name": "", "db.user": "default", "server.address": "localhost", "server.port": 9000, }, - "same_process_as_parent": True, "trace_id": transaction_trace_id, "parent_span_id": transaction_span_id, }, @@ -749,13 +781,15 @@ def test_clickhouse_dbapi_spans(sentry_init, capture_events, capture_envelopes) "origin": "auto.db.clickhouse_driver", "description": "INSERT INTO test (x) VALUES", "data": { + "sentry.name": "INSERT INTO test (x) VALUES", + "sentry.origin": "auto.db.clickhouse_driver", + "sentry.op": "db", "db.system": "clickhouse", "db.name": "", "db.user": "default", "server.address": "localhost", "server.port": 9000, }, - "same_process_as_parent": True, "trace_id": transaction_trace_id, "parent_span_id": transaction_span_id, }, @@ -764,13 +798,15 @@ def test_clickhouse_dbapi_spans(sentry_init, capture_events, capture_envelopes) "origin": "auto.db.clickhouse_driver", "description": "SELECT sum(x) FROM test WHERE x > 150", "data": { + "sentry.name": "SELECT sum(x) FROM test WHERE x > 150", + "sentry.origin": "auto.db.clickhouse_driver", + "sentry.op": "db", "db.system": "clickhouse", "db.name": "", "db.user": "default", "server.address": "localhost", "server.port": 9000, }, - "same_process_as_parent": True, "trace_id": transaction_trace_id, "parent_span_id": 
transaction_span_id, }, @@ -786,6 +822,7 @@ def test_clickhouse_dbapi_spans(sentry_init, capture_events, capture_envelopes) span.pop("span_id", None) span.pop("start_timestamp", None) span.pop("timestamp", None) + span.pop("status", None) assert event["spans"] == expected_spans @@ -804,7 +841,7 @@ def test_clickhouse_dbapi_spans_with_pii( transaction_trace_id = None transaction_span_id = None - with start_transaction(name="test_clickhouse_transaction") as transaction: + with start_span(name="test_clickhouse_transaction") as transaction: transaction_trace_id = transaction.trace_id transaction_span_id = transaction.span_id @@ -827,14 +864,17 @@ def test_clickhouse_dbapi_spans_with_pii( "origin": "auto.db.clickhouse_driver", "description": "DROP TABLE IF EXISTS test", "data": { + "sentry.name": "DROP TABLE IF EXISTS test", + "sentry.origin": "auto.db.clickhouse_driver", + "sentry.op": "db", "db.system": "clickhouse", "db.name": "", "db.user": "default", + "db.query.text": "DROP TABLE IF EXISTS test", + "db.result": "[[], []]", "server.address": "localhost", "server.port": 9000, - "db.result": [[], []], }, - "same_process_as_parent": True, "trace_id": transaction_trace_id, "parent_span_id": transaction_span_id, }, @@ -843,14 +883,17 @@ def test_clickhouse_dbapi_spans_with_pii( "origin": "auto.db.clickhouse_driver", "description": "CREATE TABLE test (x Int32) ENGINE = Memory", "data": { + "sentry.name": "CREATE TABLE test (x Int32) ENGINE = Memory", + "sentry.origin": "auto.db.clickhouse_driver", + "sentry.op": "db", "db.system": "clickhouse", "db.name": "", "db.user": "default", + "db.query.text": "CREATE TABLE test (x Int32) ENGINE = Memory", + "db.result": "[[], []]", "server.address": "localhost", "server.port": 9000, - "db.result": [[], []], }, - "same_process_as_parent": True, "trace_id": transaction_trace_id, "parent_span_id": transaction_span_id, }, @@ -859,14 +902,17 @@ def test_clickhouse_dbapi_spans_with_pii( "origin": "auto.db.clickhouse_driver", "description": "INSERT INTO test (x) VALUES", "data": { + "sentry.name": "INSERT INTO test (x) VALUES", + "sentry.origin": "auto.db.clickhouse_driver", + "sentry.op": "db", "db.system": "clickhouse", "db.name": "", "db.user": "default", + "db.query.text": "INSERT INTO test (x) VALUES", + "db.params": '[{"x": 100}]', "server.address": "localhost", "server.port": 9000, - "db.params": [{"x": 100}], }, - "same_process_as_parent": True, "trace_id": transaction_trace_id, "parent_span_id": transaction_span_id, }, @@ -875,14 +921,17 @@ def test_clickhouse_dbapi_spans_with_pii( "origin": "auto.db.clickhouse_driver", "description": "INSERT INTO test (x) VALUES", "data": { + "sentry.name": "INSERT INTO test (x) VALUES", + "sentry.origin": "auto.db.clickhouse_driver", + "sentry.op": "db", "db.system": "clickhouse", "db.name": "", "db.user": "default", + "db.query.text": "INSERT INTO test (x) VALUES", + "db.params": "[[170], [200]]", "server.address": "localhost", "server.port": 9000, - "db.params": [[170], [200]], }, - "same_process_as_parent": True, "trace_id": transaction_trace_id, "parent_span_id": transaction_span_id, }, @@ -891,15 +940,18 @@ def test_clickhouse_dbapi_spans_with_pii( "origin": "auto.db.clickhouse_driver", "description": "SELECT sum(x) FROM test WHERE x > 150", "data": { + "sentry.name": "SELECT sum(x) FROM test WHERE x > 150", + "sentry.origin": "auto.db.clickhouse_driver", + "sentry.op": "db", "db.system": "clickhouse", "db.name": "", "db.user": "default", + "db.query.text": "SELECT sum(x) FROM test WHERE x > 150", + "db.params": 
'{"minv": 150}', + "db.result": '[[[370]], [["sum(x)", "Int64"]]]', "server.address": "localhost", "server.port": 9000, - "db.params": {"minv": 150}, - "db.result": [[[370]], [["sum(x)", "Int64"]]], }, - "same_process_as_parent": True, "trace_id": transaction_trace_id, "parent_span_id": transaction_span_id, }, @@ -915,6 +967,8 @@ def test_clickhouse_dbapi_spans_with_pii( span.pop("span_id", None) span.pop("start_timestamp", None) span.pop("timestamp", None) + span.pop("same_process_as_parent", None) + span.pop("status", None) assert event["spans"] == expected_spans @@ -927,7 +981,7 @@ def test_span_origin(sentry_init, capture_events, capture_envelopes) -> None: events = capture_events() - with start_transaction(name="test_clickhouse_transaction"): + with start_span(name="test_clickhouse_transaction"): conn = connect("clickhouse://localhost") cursor = conn.cursor() cursor.execute("SELECT 1") diff --git a/tests/integrations/cohere/test_cohere.py b/tests/integrations/cohere/test_cohere.py index b8b6067625..d5a846e8e1 100644 --- a/tests/integrations/cohere/test_cohere.py +++ b/tests/integrations/cohere/test_cohere.py @@ -4,7 +4,7 @@ import pytest from cohere import Client, ChatMessage -from sentry_sdk import start_transaction +from sentry_sdk import start_span from sentry_sdk.consts import SPANDATA from sentry_sdk.integrations.cohere import CohereIntegration @@ -42,7 +42,7 @@ def test_nonstreaming_chat( ) ) - with start_transaction(name="cohere tx"): + with start_span(name="cohere tx"): response = client.chat( model="some-model", chat_history=[ChatMessage(role="SYSTEM", message="some context")], @@ -116,7 +116,7 @@ def test_streaming_chat(sentry_init, capture_events, send_default_pii, include_p ) ) - with start_transaction(name="cohere tx"): + with start_span(name="cohere tx"): responses = list( client.chat_stream( model="some-model", @@ -197,7 +197,7 @@ def test_embed(sentry_init, capture_events, send_default_pii, include_prompts): ) ) - with start_transaction(name="cohere tx"): + with start_span(name="cohere tx"): response = client.embed(texts=["hello"], model="text-embedding-3-large") assert len(response.embeddings[0]) == 3 @@ -238,7 +238,7 @@ def test_span_origin_chat(sentry_init, capture_events): ) ) - with start_transaction(name="cohere tx"): + with start_span(name="cohere tx"): client.chat( model="some-model", chat_history=[ChatMessage(role="SYSTEM", message="some context")], @@ -276,7 +276,7 @@ def test_span_origin_embed(sentry_init, capture_events): ) ) - with start_transaction(name="cohere tx"): + with start_span(name="cohere tx"): client.embed(texts=["hello"], model="text-embedding-3-large") (event,) = events diff --git a/tests/integrations/conftest.py b/tests/integrations/conftest.py index 7ac43b0efe..ab0c096a55 100644 --- a/tests/integrations/conftest.py +++ b/tests/integrations/conftest.py @@ -6,19 +6,8 @@ def capture_exceptions(monkeypatch): def inner(): errors = set() - old_capture_event_hub = sentry_sdk.Hub.capture_event old_capture_event_scope = sentry_sdk.Scope.capture_event - def capture_event_hub(self, event, hint=None, scope=None): - """ - Can be removed when we remove push_scope and the Hub from the SDK. 
- """ - if hint: - if "exc_info" in hint: - error = hint["exc_info"][1] - errors.add(error) - return old_capture_event_hub(self, event, hint=hint, scope=scope) - def capture_event_scope(self, event, hint=None, scope=None): if hint: if "exc_info" in hint: @@ -26,7 +15,6 @@ def capture_event_scope(self, event, hint=None, scope=None): errors.add(error) return old_capture_event_scope(self, event, hint=hint, scope=scope) - monkeypatch.setattr(sentry_sdk.Hub, "capture_event", capture_event_hub) monkeypatch.setattr(sentry_sdk.Scope, "capture_event", capture_event_scope) return errors diff --git a/tests/integrations/django/asgi/test_asgi.py b/tests/integrations/django/asgi/test_asgi.py index 3c78ac3f38..550ccb1a91 100644 --- a/tests/integrations/django/asgi/test_asgi.py +++ b/tests/integrations/django/asgi/test_asgi.py @@ -338,9 +338,7 @@ async def test_has_trace_if_performance_enabled(sentry_init, capture_events): django.VERSION < (3, 1), reason="async views have been introduced in Django 3.1" ) async def test_has_trace_if_performance_disabled(sentry_init, capture_events): - sentry_init( - integrations=[DjangoIntegration()], - ) + sentry_init(integrations=[DjangoIntegration()]) events = capture_events() @@ -403,9 +401,7 @@ async def test_trace_from_headers_if_performance_enabled(sentry_init, capture_ev django.VERSION < (3, 1), reason="async views have been introduced in Django 3.1" ) async def test_trace_from_headers_if_performance_disabled(sentry_init, capture_events): - sentry_init( - integrations=[DjangoIntegration()], - ) + sentry_init(integrations=[DjangoIntegration()]) events = capture_events() @@ -674,7 +670,12 @@ async def test_transaction_http_method_default( By default OPTIONS and HEAD requests do not create a transaction. """ sentry_init( - integrations=[DjangoIntegration()], + integrations=[ + DjangoIntegration( + middleware_spans=False, + signals_spans=False, + ), + ], traces_sample_rate=1.0, ) events = capture_events() @@ -706,6 +707,8 @@ async def test_transaction_http_method_custom(sentry_init, capture_events, appli sentry_init( integrations=[ DjangoIntegration( + middleware_spans=False, + signals_spans=False, http_methods_to_capture=( "OPTIONS", "head", diff --git a/tests/integrations/django/test_basic.py b/tests/integrations/django/test_basic.py index e96cd09e4f..650e1a0bb6 100644 --- a/tests/integrations/django/test_basic.py +++ b/tests/integrations/django/test_basic.py @@ -10,8 +10,6 @@ from werkzeug.test import Client from django import VERSION as DJANGO_VERSION - -from django.contrib.auth.models import User from django.core.management import execute_from_command_line from django.db.utils import OperationalError, ProgrammingError, DataError from django.http.request import RawPostDataException @@ -294,6 +292,9 @@ def test_user_captured(sentry_init, client, capture_events): def test_queryset_repr(sentry_init, capture_events): sentry_init(integrations=[DjangoIntegration()]) events = capture_events() + + from django.contrib.auth.models import User + User.objects.create_user("john", "lennon@thebeatles.com", "johnpassword") try: @@ -317,6 +318,9 @@ def test_queryset_repr(sentry_init, capture_events): def test_context_nested_queryset_repr(sentry_init, capture_events): sentry_init(integrations=[DjangoIntegration()]) events = capture_events() + + from django.contrib.auth.models import User + User.objects.create_user("john", "lennon@thebeatles.com", "johnpassword") try: @@ -955,6 +959,11 @@ def test_render_spans(sentry_init, client, capture_events, render_span_tree): transaction = 
events[0] assert expected_line in render_span_tree(transaction) + render_span = next( + span for span in transaction["spans"] if span["op"] == "template.render" + ) + assert "context.user_age" in render_span["data"] + if DJANGO_VERSION >= (1, 10): EXPECTED_MIDDLEWARE_SPANS = """\ @@ -1140,6 +1149,9 @@ def test_csrf(sentry_init, client): assert content == b"ok" +# This test is forked because it doesn't clean up after itself properly and makes +# other tests fail to resolve routes +@pytest.mark.forked @pytest.mark.skipif(DJANGO_VERSION < (2, 0), reason="Requires Django > 2.0") def test_custom_urlconf_middleware( settings, sentry_init, client, capture_events, render_span_tree @@ -1229,14 +1241,19 @@ def test_transaction_http_method_default(sentry_init, client, capture_events): By default OPTIONS and HEAD requests do not create a transaction. """ sentry_init( - integrations=[DjangoIntegration()], + integrations=[ + DjangoIntegration( + middleware_spans=False, + signals_spans=False, + ) + ], traces_sample_rate=1.0, ) events = capture_events() - client.get("/nomessage") - client.options("/nomessage") - client.head("/nomessage") + client.get(reverse("nomessage")) + client.options(reverse("nomessage")) + client.head(reverse("nomessage")) (event,) = events @@ -1252,6 +1269,8 @@ def test_transaction_http_method_custom(sentry_init, client, capture_events): "OPTIONS", "head", ), # capitalization does not matter + middleware_spans=False, + signals_spans=False, ) ], traces_sample_rate=1.0, diff --git a/tests/integrations/django/test_cache_module.py b/tests/integrations/django/test_cache_module.py index 263f9f36f8..a8a6b745c8 100644 --- a/tests/integrations/django/test_cache_module.py +++ b/tests/integrations/django/test_cache_module.py @@ -511,7 +511,9 @@ def test_cache_spans_item_size(sentry_init, client, capture_events, use_django_c @pytest.mark.forked @pytest_mark_django_db_decorator() -def test_cache_spans_get_many(sentry_init, capture_events, use_django_caching): +def test_cache_spans_get_many( + sentry_init, capture_events, use_django_caching, render_span_tree +): sentry_init( integrations=[ DjangoIntegration( @@ -528,39 +530,35 @@ def test_cache_spans_get_many(sentry_init, capture_events, use_django_caching): from django.core.cache import cache - with sentry_sdk.start_transaction(): + with sentry_sdk.start_span(name="caches"): cache.get_many([f"S{id}", f"S{id+1}"]) cache.set(f"S{id}", "Sensitive1") cache.get_many([f"S{id}", f"S{id+1}"]) (transaction,) = events + assert transaction["transaction"] == "caches" assert len(transaction["spans"]) == 7 - assert transaction["spans"][0]["op"] == "cache.get" - assert transaction["spans"][0]["description"] == f"S{id}, S{id+1}" - - assert transaction["spans"][1]["op"] == "cache.get" - assert transaction["spans"][1]["description"] == f"S{id}" - - assert transaction["spans"][2]["op"] == "cache.get" - assert transaction["spans"][2]["description"] == f"S{id+1}" - - assert transaction["spans"][3]["op"] == "cache.put" - assert transaction["spans"][3]["description"] == f"S{id}" - - assert transaction["spans"][4]["op"] == "cache.get" - assert transaction["spans"][4]["description"] == f"S{id}, S{id+1}" - - assert transaction["spans"][5]["op"] == "cache.get" - assert transaction["spans"][5]["description"] == f"S{id}" - - assert transaction["spans"][6]["op"] == "cache.get" - assert transaction["spans"][6]["description"] == f"S{id+1}" + assert ( + render_span_tree(transaction) + == f"""\ +- op=null: description=null + - op="cache.get": description="S{id}, S{id+1}" + - 
op="cache.get": description="S{id}" + - op="cache.get": description="S{id+1}" + - op="cache.put": description="S{id}" + - op="cache.get": description="S{id}, S{id+1}" + - op="cache.get": description="S{id}" + - op="cache.get": description="S{id+1}"\ +""" # noqa: E221 + ) @pytest.mark.forked @pytest_mark_django_db_decorator() -def test_cache_spans_set_many(sentry_init, capture_events, use_django_caching): +def test_cache_spans_set_many( + sentry_init, capture_events, use_django_caching, render_span_tree +): sentry_init( integrations=[ DjangoIntegration( @@ -577,24 +575,24 @@ def test_cache_spans_set_many(sentry_init, capture_events, use_django_caching): from django.core.cache import cache - with sentry_sdk.start_transaction(): + with sentry_sdk.start_span(name="caches"): cache.set_many({f"S{id}": "Sensitive1", f"S{id+1}": "Sensitive2"}) cache.get(f"S{id}") (transaction,) = events + assert transaction["transaction"] == "caches" assert len(transaction["spans"]) == 4 - assert transaction["spans"][0]["op"] == "cache.put" - assert transaction["spans"][0]["description"] == f"S{id}, S{id+1}" - - assert transaction["spans"][1]["op"] == "cache.put" - assert transaction["spans"][1]["description"] == f"S{id}" - - assert transaction["spans"][2]["op"] == "cache.put" - assert transaction["spans"][2]["description"] == f"S{id+1}" - - assert transaction["spans"][3]["op"] == "cache.get" - assert transaction["spans"][3]["description"] == f"S{id}" + assert ( + render_span_tree(transaction) + == f"""\ +- op=null: description=null + - op="cache.put": description="S{id}, S{id+1}" + - op="cache.put": description="S{id}" + - op="cache.put": description="S{id+1}" + - op="cache.get": description="S{id}"\ +""" # noqa: E221 + ) @pytest.mark.forked diff --git a/tests/integrations/django/test_db_query_data.py b/tests/integrations/django/test_db_query_data.py index 41ad9d5e1c..82f1f339a6 100644 --- a/tests/integrations/django/test_db_query_data.py +++ b/tests/integrations/django/test_db_query_data.py @@ -1,6 +1,7 @@ import os import pytest +from contextlib import contextmanager from datetime import datetime from unittest import mock @@ -12,9 +13,10 @@ except ImportError: from django.core.urlresolvers import reverse +from freezegun import freeze_time from werkzeug.test import Client -from sentry_sdk import start_transaction +from sentry_sdk import start_span from sentry_sdk.consts import SPANDATA from sentry_sdk.integrations.django import DjangoIntegration from sentry_sdk.tracing_utils import record_sql_queries @@ -346,27 +348,24 @@ def test_no_query_source_if_duration_too_short(sentry_init, client, capture_even events = capture_events() - class fake_record_sql_queries: # noqa: N801 - def __init__(self, *args, **kwargs): - with record_sql_queries(*args, **kwargs) as span: - self.span = span - - self.span.start_timestamp = datetime(2024, 1, 1, microsecond=0) - self.span.timestamp = datetime(2024, 1, 1, microsecond=99999) - - def __enter__(self): - return self.span - - def __exit__(self, type, value, traceback): - pass + def fake_start_span(*args, **kwargs): # noqa: N801 + with freeze_time(datetime(2024, 1, 1, microsecond=0)): + return start_span(*args, **kwargs) - with mock.patch( - "sentry_sdk.integrations.django.record_sql_queries", - fake_record_sql_queries, - ): - _, status, _ = unpack_werkzeug_response( - client.get(reverse("postgres_select_orm")) - ) + @contextmanager + def fake_record_sql_queries(*args, **kwargs): # noqa: N801 + with freeze_time(datetime(2024, 1, 1, microsecond=99999)): + with 
record_sql_queries(*args, **kwargs) as span: + yield span + + with mock.patch("sentry_sdk.start_span", fake_start_span): + with mock.patch( + "sentry_sdk.integrations.django.record_sql_queries", + fake_record_sql_queries, + ): + _, status, _ = unpack_werkzeug_response( + client.get(reverse("postgres_select_orm")) + ) assert status == "200 OK" @@ -404,27 +403,24 @@ def test_query_source_if_duration_over_threshold(sentry_init, client, capture_ev events = capture_events() - class fake_record_sql_queries: # noqa: N801 - def __init__(self, *args, **kwargs): - with record_sql_queries(*args, **kwargs) as span: - self.span = span - - self.span.start_timestamp = datetime(2024, 1, 1, microsecond=0) - self.span.timestamp = datetime(2024, 1, 1, microsecond=101000) - - def __enter__(self): - return self.span - - def __exit__(self, type, value, traceback): - pass + def fake_start_span(*args, **kwargs): # noqa: N801 + with freeze_time(datetime(2024, 1, 1, microsecond=0)): + return start_span(*args, **kwargs) - with mock.patch( - "sentry_sdk.integrations.django.record_sql_queries", - fake_record_sql_queries, - ): - _, status, _ = unpack_werkzeug_response( - client.get(reverse("postgres_select_orm")) - ) + @contextmanager + def fake_record_sql_queries(*args, **kwargs): # noqa: N801 + with freeze_time(datetime(2024, 1, 1, microsecond=100001)): + with record_sql_queries(*args, **kwargs) as span: + yield span + + with mock.patch("sentry_sdk.start_span", fake_start_span): + with mock.patch( + "sentry_sdk.integrations.django.record_sql_queries", + fake_record_sql_queries, + ): + _, status, _ = unpack_werkzeug_response( + client.get(reverse("postgres_select_orm")) + ) assert status == "200 OK" @@ -500,7 +496,7 @@ def test_db_span_origin_executemany(sentry_init, client, capture_events): if "postgres" not in connections: pytest.skip("postgres tests disabled") - with start_transaction(name="test_transaction"): + with start_span(name="test_transaction"): from django.db import connection, transaction cursor = connection.cursor() diff --git a/tests/integrations/django/test_middleware.py b/tests/integrations/django/test_middleware.py index 2a8d94f623..6e5c1e76be 100644 --- a/tests/integrations/django/test_middleware.py +++ b/tests/integrations/django/test_middleware.py @@ -5,8 +5,7 @@ from sentry_sdk.integrations.django.middleware import _wrap_middleware -def _sync_capable_middleware_factory(sync_capable): - # type: (Optional[bool]) -> type +def _sync_capable_middleware_factory(sync_capable: Optional[bool]) -> type: """Create a middleware class with a sync_capable attribute set to the value passed to the factory. If the factory is called with None, the middleware class will not have a sync_capable attribute. 
""" diff --git a/tests/integrations/django/test_transactions.py b/tests/integrations/django/test_transactions.py index 14f8170fc3..0eaf99dc23 100644 --- a/tests/integrations/django/test_transactions.py +++ b/tests/integrations/django/test_transactions.py @@ -21,6 +21,7 @@ included_url_conf = ((re_path(r"^foo/bar/(?P[\w]+)", lambda x: ""),), "") from sentry_sdk.integrations.django.transactions import RavenResolver +from tests.integrations.django.myapp.wsgi import application # noqa: F401 example_url_conf = ( diff --git a/tests/integrations/fastapi/test_fastapi.py b/tests/integrations/fastapi/test_fastapi.py index 3d79da92cc..cc435a5e38 100644 --- a/tests/integrations/fastapi/test_fastapi.py +++ b/tests/integrations/fastapi/test_fastapi.py @@ -22,7 +22,6 @@ FASTAPI_VERSION = parse_version(fastapi.__version__) from tests.integrations.conftest import parametrize_test_configurable_status_codes -from tests.integrations.starlette import test_starlette def fastapi_app_factory(): @@ -530,48 +529,6 @@ def test_transaction_name_in_middleware( ) -@test_starlette.parametrize_test_configurable_status_codes_deprecated -def test_configurable_status_codes_deprecated( - sentry_init, - capture_events, - failed_request_status_codes, - status_code, - expected_error, -): - with pytest.warns(DeprecationWarning): - starlette_integration = StarletteIntegration( - failed_request_status_codes=failed_request_status_codes - ) - - with pytest.warns(DeprecationWarning): - fast_api_integration = FastApiIntegration( - failed_request_status_codes=failed_request_status_codes - ) - - sentry_init( - integrations=[ - starlette_integration, - fast_api_integration, - ] - ) - - events = capture_events() - - app = FastAPI() - - @app.get("/error") - async def _error(): - raise HTTPException(status_code) - - client = TestClient(app) - client.get("/error") - - if expected_error: - assert len(events) == 1 - else: - assert not events - - @pytest.mark.skipif( FASTAPI_VERSION < (0, 80), reason="Requires FastAPI >= 0.80, because earlier versions do not support HTTP 'HEAD' requests", diff --git a/tests/integrations/flask/test_flask.py b/tests/integrations/flask/test_flask.py index 49ee684797..969fff2379 100644 --- a/tests/integrations/flask/test_flask.py +++ b/tests/integrations/flask/test_flask.py @@ -293,7 +293,7 @@ def index(): try: raise ValueError("stuff") except Exception: - logging.exception("stuff happened") + sentry_sdk.capture_exception() 1 / 0 envelopes = capture_envelopes() @@ -774,12 +774,14 @@ def hi_tx(): assert transaction_event["type"] == "transaction" assert transaction_event["transaction"] == "hi_tx" + assert transaction_event["transaction_info"] == {"source": "component"} assert transaction_event["contexts"]["trace"]["status"] == "ok" assert transaction_event["tags"]["view"] == "yes" assert transaction_event["tags"]["before_request"] == "yes" assert message_event["message"] == "hi" assert message_event["transaction"] == "hi_tx" + assert message_event["transaction_info"] == {"source": "component"} assert message_event["tags"]["view"] == "yes" assert message_event["tags"]["before_request"] == "yes" @@ -896,7 +898,12 @@ def index(): def test_request_not_modified_by_reference(sentry_init, capture_events, app): - sentry_init(integrations=[flask_sentry.FlaskIntegration()]) + sentry_init( + integrations=[ + flask_sentry.FlaskIntegration(), + LoggingIntegration(event_level="ERROR"), + ] + ) @app.route("/", methods=["POST"]) def index(): diff --git a/tests/integrations/gcp/test_gcp.py b/tests/integrations/gcp/test_gcp.py index 
22d104c817..e233fa4920 100644 --- a/tests/integrations/gcp/test_gcp.py +++ b/tests/integrations/gcp/test_gcp.py @@ -212,10 +212,7 @@ def cloud_function(functionhandler, event): (exception,) = envelope_items[0]["exception"]["values"] assert exception["type"] == "ServerlessTimeoutWarning" - assert ( - exception["value"] - == "WARNING : Function is expected to get timed out. Configured timeout duration = 3 seconds." - ) + assert exception["value"] == "WARNING: Function is about to time out." assert exception["mechanism"]["type"] == "threading" assert not exception["mechanism"]["handled"] @@ -293,35 +290,32 @@ def test_traces_sampler_gets_correct_values_in_sampling_context( dedent( """ functionhandler = None - event = { - "type": "chase", - "chasers": ["Maisey", "Charlie"], - "num_squirrels": 2, - } + + from collections import namedtuple + GCPEvent = namedtuple("GCPEvent", ["headers"]) + event = GCPEvent(headers={"Custom-Header": "Custom Value"}) + def cloud_function(functionhandler, event): # this runs after the transaction has started, which means we # can make assertions about traces_sampler try: traces_sampler.assert_any_call( DictionaryContaining({ - "gcp_env": DictionaryContaining({ - "function_name": "chase_into_tree", - "function_region": "dogpark", - "function_project": "SquirrelChasing", - }), - "gcp_event": { - "type": "chase", - "chasers": ["Maisey", "Charlie"], - "num_squirrels": 2, - }, + "faas.name": "chase_into_tree", + "faas.region": "dogpark", + "gcp.function.identity": "func_ID", + "gcp.function.entry_point": "cloud_function", + "gcp.function.project": "SquirrelChasing", + "cloud.provider": "gcp", + "http.request.header.custom-header": "Custom Value", }) ) except AssertionError: # catch the error and return it because the error itself will # get swallowed by the SDK as an "internal exception" - return {"AssertionError raised": True,} + return {"AssertionError raised": True} - return {"AssertionError raised": False,} + return {"AssertionError raised": False} """ ) + FUNCTIONS_PRELUDE diff --git a/tests/integrations/graphene/test_graphene.py b/tests/integrations/graphene/test_graphene.py index 5d54bb49cb..63bc5de5d2 100644 --- a/tests/integrations/graphene/test_graphene.py +++ b/tests/integrations/graphene/test_graphene.py @@ -207,7 +207,7 @@ def graphql_server_sync(): def test_graphql_span_holds_query_information(sentry_init, capture_events): sentry_init( integrations=[GrapheneIntegration(), FlaskIntegration()], - enable_tracing=True, + traces_sample_rate=1.0, default_integrations=False, ) events = capture_events() diff --git a/tests/integrations/grpc/test_grpc.py b/tests/integrations/grpc/test_grpc.py index 8d2698f411..7d39e6b63f 100644 --- a/tests/integrations/grpc/test_grpc.py +++ b/tests/integrations/grpc/test_grpc.py @@ -5,7 +5,7 @@ from typing import List, Optional, Tuple from unittest.mock import Mock -from sentry_sdk import start_span, start_transaction +from sentry_sdk import start_span from sentry_sdk.consts import OP from sentry_sdk.integrations.grpc import GRPCIntegration from tests.conftest import ApproxDict @@ -50,7 +50,7 @@ def _tear_down(server: grpc.Server): @pytest.mark.forked -def test_grpc_server_starts_transaction(sentry_init, capture_events_forksafe): +def test_grpc_server_starts_root_span(sentry_init, capture_events_forksafe): sentry_init(traces_sample_rate=1.0, integrations=[GRPCIntegration()]) events = capture_events_forksafe() @@ -108,7 +108,7 @@ def test_grpc_server_other_interceptors(sentry_init, capture_events_forksafe): @pytest.mark.forked -def 
test_grpc_server_continues_transaction(sentry_init, capture_events_forksafe): +def test_grpc_server_continues_trace(sentry_init, capture_events_forksafe): sentry_init(traces_sample_rate=1.0, integrations=[GRPCIntegration()]) events = capture_events_forksafe() @@ -117,20 +117,20 @@ def test_grpc_server_continues_transaction(sentry_init, capture_events_forksafe) # Use the provided channel stub = gRPCTestServiceStub(channel) - with start_transaction() as transaction: + with start_span() as root_span: metadata = ( ( "baggage", "sentry-trace_id={trace_id},sentry-environment=test," "sentry-transaction=test-transaction,sentry-sample_rate=1.0".format( - trace_id=transaction.trace_id + trace_id=root_span.trace_id ), ), ( "sentry-trace", "{trace_id}-{parent_span_id}-{sampled}".format( - trace_id=transaction.trace_id, - parent_span_id=transaction.span_id, + trace_id=root_span.trace_id, + parent_span_id=root_span.span_id, sampled=1, ), ), @@ -148,7 +148,7 @@ def test_grpc_server_continues_transaction(sentry_init, capture_events_forksafe) "source": "custom", } assert event["contexts"]["trace"]["op"] == OP.GRPC_SERVER - assert event["contexts"]["trace"]["trace_id"] == transaction.trace_id + assert event["contexts"]["trace"]["trace_id"] == root_span.trace_id assert span["op"] == "test" @@ -162,17 +162,17 @@ def test_grpc_client_starts_span(sentry_init, capture_events_forksafe): # Use the provided channel stub = gRPCTestServiceStub(channel) - with start_transaction(): + with start_span(): stub.TestServe(gRPCTestMessage(text="test")) _tear_down(server=server) events.write_file.close() events.read_event() - local_transaction = events.read_event() - span = local_transaction["spans"][0] + local_root_span = events.read_event() + span = local_root_span["spans"][0] - assert len(local_transaction["spans"]) == 1 + assert len(local_root_span["spans"]) == 1 assert span["op"] == OP.GRPC_CLIENT assert ( span["description"] @@ -197,16 +197,16 @@ def test_grpc_client_unary_stream_starts_span(sentry_init, capture_events_forksa # Use the provided channel stub = gRPCTestServiceStub(channel) - with start_transaction(): + with start_span(): [el for el in stub.TestUnaryStream(gRPCTestMessage(text="test"))] _tear_down(server=server) events.write_file.close() - local_transaction = events.read_event() - span = local_transaction["spans"][0] + local_root_span = events.read_event() + span = local_root_span["spans"][0] - assert len(local_transaction["spans"]) == 1 + assert len(local_root_span["spans"]) == 1 assert span["op"] == OP.GRPC_CLIENT assert ( span["description"] @@ -242,7 +242,7 @@ def test_grpc_client_other_interceptor(sentry_init, capture_events_forksafe): channel = grpc.intercept_channel(channel, MockClientInterceptor()) stub = gRPCTestServiceStub(channel) - with start_transaction(): + with start_span(): stub.TestServe(gRPCTestMessage(text="test")) _tear_down(server=server) @@ -251,10 +251,10 @@ def test_grpc_client_other_interceptor(sentry_init, capture_events_forksafe): events.write_file.close() events.read_event() - local_transaction = events.read_event() - span = local_transaction["spans"][0] + local_root_span = events.read_event() + span = local_root_span["spans"][0] - assert len(local_transaction["spans"]) == 1 + assert len(local_root_span["spans"]) == 1 assert span["op"] == OP.GRPC_CLIENT assert ( span["description"] @@ -281,18 +281,18 @@ def test_grpc_client_and_servers_interceptors_integration( # Use the provided channel stub = gRPCTestServiceStub(channel) - with start_transaction(): + with start_span(): 
stub.TestServe(gRPCTestMessage(text="test")) _tear_down(server=server) events.write_file.close() - server_transaction = events.read_event() - local_transaction = events.read_event() + server_root_span = events.read_event() + local_root_span = events.read_event() assert ( - server_transaction["contexts"]["trace"]["trace_id"] - == local_transaction["contexts"]["trace"]["trace_id"] + server_root_span["contexts"]["trace"]["trace_id"] + == local_root_span["contexts"]["trace"]["trace_id"] ) @@ -337,26 +337,23 @@ def test_span_origin(sentry_init, capture_events_forksafe): # Use the provided channel stub = gRPCTestServiceStub(channel) - with start_transaction(name="custom_transaction"): + with start_span(name="custom_transaction"): stub.TestServe(gRPCTestMessage(text="test")) _tear_down(server=server) events.write_file.close() - transaction_from_integration = events.read_event() - custom_transaction = events.read_event() + root_span_from_integration = events.read_event() + custom_root_span = events.read_event() + assert root_span_from_integration["contexts"]["trace"]["origin"] == "auto.grpc.grpc" assert ( - transaction_from_integration["contexts"]["trace"]["origin"] == "auto.grpc.grpc" - ) - assert ( - transaction_from_integration["spans"][0]["origin"] - == "auto.grpc.grpc.TestService" + root_span_from_integration["spans"][0]["origin"] == "auto.grpc.grpc.TestService" ) # manually created in TestService, not the instrumentation - assert custom_transaction["contexts"]["trace"]["origin"] == "manual" - assert custom_transaction["spans"][0]["origin"] == "auto.grpc.grpc" + assert custom_root_span["contexts"]["trace"]["origin"] == "manual" + assert custom_root_span["spans"][0]["origin"] == "auto.grpc.grpc" class TestService(gRPCTestServiceServicer): diff --git a/tests/integrations/grpc/test_grpc_aio.py b/tests/integrations/grpc/test_grpc_aio.py index 96e9a4dba8..4f28f25345 100644 --- a/tests/integrations/grpc/test_grpc_aio.py +++ b/tests/integrations/grpc/test_grpc_aio.py @@ -5,7 +5,7 @@ import pytest_asyncio import sentry_sdk -from sentry_sdk import start_span, start_transaction +from sentry_sdk import start_span from sentry_sdk.consts import OP from sentry_sdk.integrations.grpc import GRPCIntegration from tests.conftest import ApproxDict @@ -103,20 +103,20 @@ async def test_grpc_server_continues_transaction( # Use the provided channel stub = gRPCTestServiceStub(channel) - with sentry_sdk.start_transaction() as transaction: + with sentry_sdk.start_span() as root_span: metadata = ( ( "baggage", "sentry-trace_id={trace_id},sentry-environment=test," "sentry-transaction=test-transaction,sentry-sample_rate=1.0".format( - trace_id=transaction.trace_id + trace_id=root_span.trace_id ), ), ( "sentry-trace", "{trace_id}-{parent_span_id}-{sampled}".format( - trace_id=transaction.trace_id, - parent_span_id=transaction.span_id, + trace_id=root_span.trace_id, + parent_span_id=root_span.span_id, sampled=1, ), ), @@ -132,7 +132,7 @@ async def test_grpc_server_continues_transaction( "source": "custom", } assert event["contexts"]["trace"]["op"] == OP.GRPC_SERVER - assert event["contexts"]["trace"]["trace_id"] == transaction.trace_id + assert event["contexts"]["trace"]["trace_id"] == root_span.trace_id assert span["op"] == "test" @@ -185,15 +185,15 @@ async def test_grpc_client_starts_span( # Use the provided channel stub = gRPCTestServiceStub(channel) - with start_transaction(): + with start_span(): await stub.TestServe(gRPCTestMessage(text="test")) events.write_file.close() events.read_event() - local_transaction = 
events.read_event() - span = local_transaction["spans"][0] + local_root_span = events.read_event() + span = local_root_span["spans"][0] - assert len(local_transaction["spans"]) == 1 + assert len(local_root_span["spans"]) == 1 assert span["op"] == OP.GRPC_CLIENT assert ( span["description"] @@ -217,15 +217,15 @@ async def test_grpc_client_unary_stream_starts_span( # Use the provided channel stub = gRPCTestServiceStub(channel) - with start_transaction(): + with start_span(): response = stub.TestUnaryStream(gRPCTestMessage(text="test")) [_ async for _ in response] events.write_file.close() - local_transaction = events.read_event() - span = local_transaction["spans"][0] + local_root_span = events.read_event() + span = local_root_span["spans"][0] - assert len(local_transaction["spans"]) == 1 + assert len(local_root_span["spans"]) == 1 assert span["op"] == OP.GRPC_CLIENT assert ( span["description"] @@ -275,24 +275,22 @@ async def test_span_origin(grpc_server_and_channel, capture_events_forksafe): # Use the provided channel stub = gRPCTestServiceStub(channel) - with start_transaction(name="custom_transaction"): + with start_span(name="custom_root_span"): await stub.TestServe(gRPCTestMessage(text="test")) events.write_file.close() - transaction_from_integration = events.read_event() - custom_transaction = events.read_event() + root_span_from_integration = events.read_event() + custom_root_span = events.read_event() + assert root_span_from_integration["contexts"]["trace"]["origin"] == "auto.grpc.grpc" assert ( - transaction_from_integration["contexts"]["trace"]["origin"] == "auto.grpc.grpc" - ) - assert ( - transaction_from_integration["spans"][0]["origin"] + root_span_from_integration["spans"][0]["origin"] == "auto.grpc.grpc.TestService.aio" ) # manually created in TestService, not the instrumentation - assert custom_transaction["contexts"]["trace"]["origin"] == "manual" - assert custom_transaction["spans"][0]["origin"] == "auto.grpc.grpc" + assert custom_root_span["contexts"]["trace"]["origin"] == "manual" + assert custom_root_span["spans"][0]["origin"] == "auto.grpc.grpc" class TestService(gRPCTestServiceServicer): diff --git a/tests/integrations/httpx/test_httpx.py b/tests/integrations/httpx/test_httpx.py index 5a35b68076..9e4b140f70 100644 --- a/tests/integrations/httpx/test_httpx.py +++ b/tests/integrations/httpx/test_httpx.py @@ -5,10 +5,10 @@ import pytest import sentry_sdk -from sentry_sdk import capture_message, start_transaction +from sentry_sdk import capture_message, start_span from sentry_sdk.consts import MATCH_ALL, SPANDATA from sentry_sdk.integrations.httpx import HttpxIntegration -from tests.conftest import ApproxDict +from tests.conftest import ApproxDict, SortedBaggage @pytest.mark.parametrize( @@ -26,7 +26,7 @@ def before_breadcrumb(crumb, hint): url = "http://example.com/" - with start_transaction(): + with start_span(): events = capture_events() if asyncio.iscoroutinefunction(httpx_client.get): @@ -64,8 +64,8 @@ def before_breadcrumb(crumb, hint): @pytest.mark.parametrize( "status_code,level", [ - (200, None), - (301, None), + (200, "info"), + (301, "info"), (403, "warning"), (405, "warning"), (500, "error"), @@ -80,7 +80,7 @@ def test_crumb_capture_client_error( url = "http://example.com/" - with start_transaction(): + with start_span(name="crumbs"): events = capture_events() if asyncio.iscoroutinefunction(httpx_client.get): @@ -98,12 +98,7 @@ def test_crumb_capture_client_error( crumb = event["breadcrumbs"]["values"][0] assert crumb["type"] == "http" assert 
crumb["category"] == "httplib" - - if level is None: - assert "level" not in crumb - else: - assert crumb["level"] == level - + assert crumb["level"] == level assert crumb["data"] == ApproxDict( { "url": url, @@ -119,7 +114,9 @@ def test_crumb_capture_client_error( "httpx_client", (httpx.Client(), httpx.AsyncClient()), ) -def test_outgoing_trace_headers(sentry_init, httpx_client, httpx_mock): +def test_outgoing_trace_headers( + sentry_init, httpx_client, capture_envelopes, httpx_mock +): httpx_mock.add_response() sentry_init( @@ -127,13 +124,14 @@ def test_outgoing_trace_headers(sentry_init, httpx_client, httpx_mock): integrations=[HttpxIntegration()], ) + envelopes = capture_envelopes() + url = "http://example.com/" - with start_transaction( + with start_span( name="/interactions/other-dogs/new-dog", op="greeting.sniff", - trace_id="01234567890123456789012345678901", - ) as transaction: + ): if asyncio.iscoroutinefunction(httpx_client.get): response = asyncio.get_event_loop().run_until_complete( httpx_client.get(url) @@ -141,14 +139,17 @@ def test_outgoing_trace_headers(sentry_init, httpx_client, httpx_mock): else: response = httpx_client.get(url) - request_span = transaction._span_recorder.spans[-1] - assert response.request.headers[ - "sentry-trace" - ] == "{trace_id}-{parent_span_id}-{sampled}".format( - trace_id=transaction.trace_id, - parent_span_id=request_span.span_id, - sampled=1, - ) + (envelope,) = envelopes + transaction = envelope.get_transaction_event() + request_span = transaction["spans"][-1] + + assert response.request.headers[ + "sentry-trace" + ] == "{trace_id}-{parent_span_id}-{sampled}".format( + trace_id=transaction["contexts"]["trace"]["trace_id"], + parent_span_id=request_span["span_id"], + sampled=1, + ) @pytest.mark.parametrize( @@ -158,6 +159,7 @@ def test_outgoing_trace_headers(sentry_init, httpx_client, httpx_mock): def test_outgoing_trace_headers_append_to_baggage( sentry_init, httpx_client, + capture_envelopes, httpx_mock, ): httpx_mock.add_response() @@ -168,15 +170,15 @@ def test_outgoing_trace_headers_append_to_baggage( release="d08ebdb9309e1b004c6f52202de58a09c2268e42", ) + envelopes = capture_envelopes() + url = "http://example.com/" - # patch random.uniform to return a predictable sample_rand value with mock.patch("sentry_sdk.tracing_utils.Random.uniform", return_value=0.5): - with start_transaction( + with start_span( name="/interactions/other-dogs/new-dog", op="greeting.sniff", - trace_id="01234567890123456789012345678901", - ) as transaction: + ): if asyncio.iscoroutinefunction(httpx_client.get): response = asyncio.get_event_loop().run_until_complete( httpx_client.get(url, headers={"baGGage": "custom=data"}) @@ -184,18 +186,21 @@ def test_outgoing_trace_headers_append_to_baggage( else: response = httpx_client.get(url, headers={"baGGage": "custom=data"}) - request_span = transaction._span_recorder.spans[-1] - assert response.request.headers[ - "sentry-trace" - ] == "{trace_id}-{parent_span_id}-{sampled}".format( - trace_id=transaction.trace_id, - parent_span_id=request_span.span_id, - sampled=1, - ) - assert ( - response.request.headers["baggage"] - == "custom=data,sentry-trace_id=01234567890123456789012345678901,sentry-sample_rand=0.500000,sentry-environment=production,sentry-release=d08ebdb9309e1b004c6f52202de58a09c2268e42,sentry-transaction=/interactions/other-dogs/new-dog,sentry-sample_rate=1.0,sentry-sampled=true" - ) + (envelope,) = envelopes + transaction = envelope.get_transaction_event() + request_span = transaction["spans"][-1] + trace_id = 
transaction["contexts"]["trace"]["trace_id"] + + assert response.request.headers[ + "sentry-trace" + ] == "{trace_id}-{parent_span_id}-{sampled}".format( + trace_id=trace_id, + parent_span_id=request_span["span_id"], + sampled=1, + ) + assert response.request.headers["baggage"] == SortedBaggage( + f"custom=data,sentry-trace_id={trace_id},sentry-sample_rand=0.500000,sentry-environment=production,sentry-release=d08ebdb9309e1b004c6f52202de58a09c2268e42,sentry-transaction=/interactions/other-dogs/new-dog,sentry-sample_rate=1.0,sentry-sampled=true" # noqa: E231 + ) @pytest.mark.parametrize( @@ -328,7 +333,7 @@ def test_option_trace_propagation_targets( integrations=[HttpxIntegration()], ) - with sentry_sdk.start_transaction(): # Must be in a transaction to propagate headers + with sentry_sdk.start_span(): # Must be in a root span to propagate headers if asyncio.iscoroutinefunction(httpx_client.get): asyncio.get_event_loop().run_until_complete(httpx_client.get(url)) else: @@ -342,7 +347,7 @@ def test_option_trace_propagation_targets( assert "sentry-trace" not in request_headers -def test_do_not_propagate_outside_transaction(sentry_init, httpx_mock): +def test_propagates_twp_outside_root_span(sentry_init, httpx_mock): httpx_mock.add_response() sentry_init( @@ -355,7 +360,8 @@ def test_do_not_propagate_outside_transaction(sentry_init, httpx_mock): httpx_client.get("http://example.com/") request_headers = httpx_mock.get_request().headers - assert "sentry-trace" not in request_headers + assert "sentry-trace" in request_headers + assert request_headers["sentry-trace"] == sentry_sdk.get_traceparent() @pytest.mark.tests_internal_exceptions @@ -408,7 +414,7 @@ def test_span_origin(sentry_init, capture_events, httpx_client, httpx_mock): url = "http://example.com/" - with start_transaction(name="test_transaction"): + with start_span(name="test_root_span"): if asyncio.iscoroutinefunction(httpx_client.get): asyncio.get_event_loop().run_until_complete(httpx_client.get(url)) else: diff --git a/tests/integrations/huey/test_huey.py b/tests/integrations/huey/test_huey.py index 143a369348..bdd5c2ca10 100644 --- a/tests/integrations/huey/test_huey.py +++ b/tests/integrations/huey/test_huey.py @@ -1,7 +1,7 @@ import pytest from decimal import DivisionByZero -from sentry_sdk import start_transaction +import sentry_sdk from sentry_sdk.integrations.huey import HueyIntegration from sentry_sdk.utils import parse_version @@ -160,7 +160,7 @@ def dummy_task(): events = capture_events() - with start_transaction() as transaction: + with sentry_sdk.start_span() as transaction: dummy_task() (event,) = events @@ -182,7 +182,7 @@ def test_huey_propagate_trace(init_huey, capture_events): def propagated_trace_task(): pass - with start_transaction() as outer_transaction: + with sentry_sdk.start_span() as outer_transaction: execute_huey_task(huey, propagated_trace_task) assert ( @@ -200,7 +200,7 @@ def dummy_task(): events = capture_events() - with start_transaction(): + with sentry_sdk.start_span(): dummy_task() (event,) = events diff --git a/tests/integrations/huggingface_hub/test_huggingface_hub.py b/tests/integrations/huggingface_hub/test_huggingface_hub.py index df0c6c6d76..f68c47a15c 100644 --- a/tests/integrations/huggingface_hub/test_huggingface_hub.py +++ b/tests/integrations/huggingface_hub/test_huggingface_hub.py @@ -7,7 +7,7 @@ ) from huggingface_hub.errors import OverloadedError -from sentry_sdk import start_transaction +from sentry_sdk import start_span from sentry_sdk.consts import SPANDATA from 
sentry_sdk.integrations.huggingface_hub import HuggingfaceHubIntegration @@ -55,7 +55,7 @@ def test_nonstreaming_chat_completion( ) mock_client_post(client, post_mock) - with start_transaction(name="huggingface_hub tx"): + with start_span(name="huggingface_hub tx"): response = client.text_generation( prompt="hello", details=details_arg, @@ -110,7 +110,7 @@ def test_streaming_chat_completion( ) mock_client_post(client, post_mock) - with start_transaction(name="huggingface_hub tx"): + with start_span(name="huggingface_hub tx"): response = list( client.text_generation( prompt="hello", @@ -172,7 +172,7 @@ def test_span_origin(sentry_init, capture_events): ) mock_client_post(client, post_mock) - with start_transaction(name="huggingface_hub tx"): + with start_span(name="huggingface_hub tx"): list( client.text_generation( prompt="hello", diff --git a/tests/integrations/langchain/test_langchain.py b/tests/integrations/langchain/test_langchain.py index 9d55a49f82..93118fef88 100644 --- a/tests/integrations/langchain/test_langchain.py +++ b/tests/integrations/langchain/test_langchain.py @@ -19,7 +19,7 @@ from langchain_core.runnables import RunnableConfig from langchain_core.language_models.chat_models import BaseChatModel -from sentry_sdk import start_transaction +from sentry_sdk import start_span from sentry_sdk.integrations.langchain import ( LangchainIntegration, SentryLangchainCallback, @@ -34,8 +34,8 @@ def get_word_length(word: str) -> int: return len(word) -global stream_result_mock # type: Mock -global llm_type # type: str +stream_result_mock: Mock +llm_type: str class MockOpenAI(ChatOpenAI): @@ -171,7 +171,7 @@ def test_langchain_agent( agent_executor = AgentExecutor(agent=agent, tools=[get_word_length], verbose=True) - with start_transaction(): + with start_span(name="agent"): list(agent_executor.stream({"input": "How many letters in the word eudca"})) tx = events[0] @@ -190,9 +190,10 @@ def test_langchain_agent( assert "gen_ai.usage.input_tokens" in chat_spans[0]["data"] assert "gen_ai.usage.total_tokens" in chat_spans[0]["data"] else: - # important: to avoid double counting, we do *not* measure + # important: to avoid double counting, we do *not* count # tokens used if we have an explicit integration (e.g. 
OpenAI) - assert "measurements" not in chat_spans[0] + assert "gen_ai.usage.input_tokens" not in chat_spans[0]["data"] + assert "gen_ai.usage.total_tokens" not in chat_spans[0]["data"] if send_default_pii and include_prompts: assert ( @@ -243,7 +244,7 @@ def test_langchain_error(sentry_init, capture_events): agent_executor = AgentExecutor(agent=agent, tools=[get_word_length], verbose=True) - with start_transaction(), pytest.raises(Exception): + with start_span(name="agent"), pytest.raises(Exception): list(agent_executor.stream({"input": "How many letters in the word eudca"})) error = events[0] @@ -338,7 +339,7 @@ def test_span_origin(sentry_init, capture_events): agent_executor = AgentExecutor(agent=agent, tools=[get_word_length], verbose=True) - with start_transaction(): + with start_span(name="agent"): list(agent_executor.stream({"input": "How many letters in the word eudca"})) (event,) = events diff --git a/tests/integrations/litestar/test_litestar.py b/tests/integrations/litestar/test_litestar.py index b064c17112..eb29acb50b 100644 --- a/tests/integrations/litestar/test_litestar.py +++ b/tests/integrations/litestar/test_litestar.py @@ -6,6 +6,7 @@ from sentry_sdk import capture_message from sentry_sdk.integrations.litestar import LitestarIntegration +from tests.conftest import ApproxDict from typing import Any @@ -205,7 +206,7 @@ def is_matching_span(expected_span, actual_span): return ( expected_span["op"] == actual_span["op"] and expected_span["description"] == actual_span["description"] - and expected_span["tags"] == actual_span["tags"] + and ApproxDict(expected_span["tags"]) == actual_span["tags"] ) actual_litestar_spans = list( @@ -301,7 +302,7 @@ def is_matching_span(expected_span, actual_span): return ( expected_span["op"] == actual_span["op"] and actual_span["description"].startswith(expected_span["description"]) - and expected_span["tags"] == actual_span["tags"] + and ApproxDict(expected_span["tags"]) == actual_span["tags"] ) actual_litestar_spans = list( diff --git a/tests/integrations/logging/test_logging.py b/tests/integrations/logging/test_logging.py index 7ecdf42500..67ea96cd28 100644 --- a/tests/integrations/logging/test_logging.py +++ b/tests/integrations/logging/test_logging.py @@ -18,43 +18,72 @@ def reset_level(): logger.setLevel(logging.DEBUG) -@pytest.mark.parametrize("logger", [logger, other_logger]) -def test_logging_works_with_many_loggers(sentry_init, capture_events, logger): - sentry_init(integrations=[LoggingIntegration(event_level="ERROR")]) +@pytest.mark.parametrize("integrations", [None, [], [LoggingIntegration()]]) +@pytest.mark.parametrize( + "kwargs", [{"exc_info": None}, {}, {"exc_info": 0}, {"exc_info": False}] +) +def test_logging_defaults(integrations, sentry_init, capture_events, kwargs): + sentry_init(integrations=integrations) events = capture_events() logger.info("bread") - logger.critical("LOL") - (event,) = events - assert event["level"] == "fatal" - assert not event["logentry"]["params"] - assert event["logentry"]["message"] == "LOL" - assert event["logentry"]["formatted"] == "LOL" - assert any(crumb["message"] == "bread" for crumb in event["breadcrumbs"]["values"]) + logger.error("error") + logger.critical("LOL", **kwargs) + + assert len(events) == 0 -@pytest.mark.parametrize("integrations", [None, [], [LoggingIntegration()]]) @pytest.mark.parametrize( "kwargs", [{"exc_info": None}, {}, {"exc_info": 0}, {"exc_info": False}] ) -def test_logging_defaults(integrations, sentry_init, capture_events, kwargs): - 
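# The rewritten logging tests encode the new default: without an explicit
# event_level, the integration no longer promotes log records to error events
# (hence the `assert len(events) == 0` above). A sketch of opting back in,
# assuming an otherwise default init:
import logging
import sentry_sdk
from sentry_sdk.integrations.logging import LoggingIntegration

sentry_sdk.init(integrations=[LoggingIntegration(event_level=logging.ERROR)])
log = logging.getLogger(__name__)
log.info("breadcrumb only")        # attached to later events as a breadcrumb
log.error("promoted to an event")  # captured as an error-level event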
sentry_init(integrations=integrations) +def test_logging_basic(sentry_init, capture_events, kwargs): + sentry_init(integrations=[LoggingIntegration(event_level=logging.ERROR)]) events = capture_events() logger.info("bread") + logger.error("error") logger.critical("LOL", **kwargs) - (event,) = events + (error_event, critical_event) = events - assert event["level"] == "fatal" - assert any(crumb["message"] == "bread" for crumb in event["breadcrumbs"]["values"]) + assert error_event["level"] == "error" + assert any( + crumb["message"] == "bread" for crumb in error_event["breadcrumbs"]["values"] + ) + assert not any( + crumb["message"] == "LOL" for crumb in error_event["breadcrumbs"]["values"] + ) + assert "threads" not in error_event + + assert critical_event["level"] == "fatal" + assert any( + crumb["message"] == "bread" for crumb in critical_event["breadcrumbs"]["values"] + ) assert not any( - crumb["message"] == "LOL" for crumb in event["breadcrumbs"]["values"] + crumb["message"] == "LOL" for crumb in critical_event["breadcrumbs"]["values"] ) - assert "threads" not in event + assert "threads" not in critical_event + + +@pytest.mark.parametrize("logger", [logger, other_logger]) +def test_logging_works_with_many_loggers(sentry_init, capture_events, logger): + sentry_init(integrations=[LoggingIntegration(event_level="ERROR")]) + events = capture_events() + + logger.info("bread") + logger.critical("LOL") + (event,) = events + assert event["level"] == "fatal" + assert not event["logentry"]["params"] + assert event["logentry"]["message"] == "LOL" + assert event["logentry"]["formatted"] == "LOL" + assert any(crumb["message"] == "bread" for crumb in event["breadcrumbs"]["values"]) def test_logging_extra_data(sentry_init, capture_events): - sentry_init(integrations=[LoggingIntegration()], default_integrations=False) + sentry_init( + integrations=[LoggingIntegration(event_level=logging.ERROR)], + default_integrations=False, + ) events = capture_events() logger.info("bread", extra=dict(foo=42)) @@ -71,7 +100,10 @@ def test_logging_extra_data(sentry_init, capture_events): def test_logging_extra_data_integer_keys(sentry_init, capture_events): - sentry_init(integrations=[LoggingIntegration()], default_integrations=False) + sentry_init( + integrations=[LoggingIntegration(event_level=logging.ERROR)], + default_integrations=False, + ) events = capture_events() logger.critical("integer in extra keys", extra={1: 1}) @@ -89,7 +121,10 @@ def test_logging_extra_data_integer_keys(sentry_init, capture_events): ), ) def test_logging_stack_trace(sentry_init, capture_events, enable_stack_trace_kwarg): - sentry_init(integrations=[LoggingIntegration()], default_integrations=False) + sentry_init( + integrations=[LoggingIntegration(event_level=logging.ERROR)], + default_integrations=False, + ) events = capture_events() logger.error("first", **enable_stack_trace_kwarg) @@ -108,7 +143,10 @@ def test_logging_stack_trace(sentry_init, capture_events, enable_stack_trace_kwa def test_logging_level(sentry_init, capture_events): - sentry_init(integrations=[LoggingIntegration()], default_integrations=False) + sentry_init( + integrations=[LoggingIntegration(event_level=logging.ERROR)], + default_integrations=False, + ) events = capture_events() logger.setLevel(logging.WARNING) @@ -164,7 +202,10 @@ def test_custom_log_level_names(sentry_init, capture_events): def test_logging_filters(sentry_init, capture_events): - sentry_init(integrations=[LoggingIntegration()], default_integrations=False) + sentry_init( + 
integrations=[LoggingIntegration(event_level=logging.ERROR)], + default_integrations=False, + ) events = capture_events() should_log = False @@ -218,12 +259,15 @@ def test_logging_captured_warnings(sentry_init, capture_events, recwarn): assert events[1]["logentry"]["params"] == [] # Using recwarn suppresses the "third" warning in the test output - assert len(recwarn) == 1 - assert str(recwarn[0].message) == "third" + third_warnings = [w for w in recwarn if str(w.message) == "third"] + assert len(third_warnings) == 1 def test_ignore_logger(sentry_init, capture_events): - sentry_init(integrations=[LoggingIntegration()], default_integrations=False) + sentry_init( + integrations=[LoggingIntegration(event_level=logging.ERROR)], + default_integrations=False, + ) events = capture_events() ignore_logger("testfoo") @@ -246,7 +290,10 @@ def test_ignore_logger_whitespace_padding(sentry_init, capture_events): def test_ignore_logger_wildcard(sentry_init, capture_events): - sentry_init(integrations=[LoggingIntegration()], default_integrations=False) + sentry_init( + integrations=[LoggingIntegration(event_level=logging.ERROR)], + default_integrations=False, + ) events = capture_events() ignore_logger("testfoo.*") @@ -264,7 +311,10 @@ def test_ignore_logger_wildcard(sentry_init, capture_events): def test_logging_dictionary_interpolation(sentry_init, capture_events): """Here we test an entire dictionary being interpolated into the log message.""" - sentry_init(integrations=[LoggingIntegration()], default_integrations=False) + sentry_init( + integrations=[LoggingIntegration(event_level=logging.ERROR)], + default_integrations=False, + ) events = capture_events() logger.error("this is a log with a dictionary %s", {"foo": "bar"}) @@ -280,7 +330,10 @@ def test_logging_dictionary_interpolation(sentry_init, capture_events): def test_logging_dictionary_args(sentry_init, capture_events): """Here we test items from a dictionary being interpolated into the log message.""" - sentry_init(integrations=[LoggingIntegration()], default_integrations=False) + sentry_init( + integrations=[LoggingIntegration(event_level=logging.ERROR)], + default_integrations=False, + ) events = capture_events() logger.error( @@ -369,10 +422,10 @@ def test_logging_errors(sentry_init, capture_envelopes): python_logger.error("error is %s", Exception("test exc 2")) get_client().flush() - error_event_1 = envelopes[0].items[0].payload.json - assert error_event_1["level"] == "error" - error_event_2 = envelopes[1].items[0].payload.json - assert error_event_2["level"] == "error" + for envelope in envelopes: + for item in envelope.items: + for subitem in item.payload.json["items"]: + assert subitem["level"] == "error" logs = envelopes_to_logs(envelopes) assert logs[0]["severity_text"] == "error" diff --git a/tests/integrations/openai/test_openai.py b/tests/integrations/openai/test_openai.py index a3c7bdd9d9..86b167c31d 100644 --- a/tests/integrations/openai/test_openai.py +++ b/tests/integrations/openai/test_openai.py @@ -33,7 +33,7 @@ except ImportError: SKIP_RESPONSES_TESTS = True -from sentry_sdk import start_transaction +import sentry_sdk from sentry_sdk.consts import SPANDATA from sentry_sdk.integrations.openai import ( OpenAIIntegration, @@ -136,7 +136,7 @@ def test_nonstreaming_chat_completion( client = OpenAI(api_key="z") client.chat.completions._post = mock.Mock(return_value=EXAMPLE_CHAT_COMPLETION) - with start_transaction(name="openai tx"): + with sentry_sdk.start_span(name="openai tx"): response = ( client.chat.completions.create( 
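# Pattern applied throughout this patch: sentry_sdk.start_transaction(...) is
# replaced by sentry_sdk.start_span(...), with the root span taking over the
# former transaction name. A minimal before/after sketch:
import sentry_sdk

# before: with sentry_sdk.start_transaction(name="openai tx"): ...
with sentry_sdk.start_span(name="openai tx"):
    ...  # instrumented client calls become children of this root span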
model="some-model", messages=[{"role": "system", "content": "hello"}] @@ -181,7 +181,7 @@ async def test_nonstreaming_chat_completion_async( client = AsyncOpenAI(api_key="z") client.chat.completions._post = AsyncMock(return_value=EXAMPLE_CHAT_COMPLETION) - with start_transaction(name="openai tx"): + with sentry_sdk.start_span(name="openai tx"): response = await client.chat.completions.create( model="some-model", messages=[{"role": "system", "content": "hello"}] ) @@ -273,7 +273,7 @@ def test_streaming_chat_completion( ] client.chat.completions._post = mock.Mock(return_value=returned_stream) - with start_transaction(name="openai tx"): + with sentry_sdk.start_span(name="openai tx"): response_stream = client.chat.completions.create( model="some-model", messages=[{"role": "system", "content": "hello"}] ) @@ -367,7 +367,7 @@ async def test_streaming_chat_completion_async( ) client.chat.completions._post = AsyncMock(return_value=returned_stream) - with start_transaction(name="openai tx"): + with sentry_sdk.start_span(name="openai tx"): response_stream = await client.chat.completions.create( model="some-model", messages=[{"role": "system", "content": "hello"}] ) @@ -399,6 +399,7 @@ async def test_streaming_chat_completion_async( pass # if tiktoken is not installed, we can't guarantee token usage will be calculated properly +@pytest.mark.forked def test_bad_chat_completion(sentry_init, capture_events): sentry_init(integrations=[OpenAIIntegration()], traces_sample_rate=1.0) events = capture_events() @@ -412,7 +413,10 @@ def test_bad_chat_completion(sentry_init, capture_events): model="some-model", messages=[{"role": "system", "content": "hello"}] ) - (event,) = events + ( + _, + event, + ) = events assert event["level"] == "error" @@ -430,7 +434,10 @@ async def test_bad_chat_completion_async(sentry_init, capture_events): model="some-model", messages=[{"role": "system", "content": "hello"}] ) - (event,) = events + ( + _, + event, + ) = events assert event["level"] == "error" @@ -461,7 +468,7 @@ def test_embeddings_create( ) client.embeddings._post = mock.Mock(return_value=returned_embedding) - with start_transaction(name="openai tx"): + with sentry_sdk.start_span(name="openai tx"): response = client.embeddings.create( input="hello", model="text-embedding-3-large" ) @@ -509,7 +516,7 @@ async def test_embeddings_create_async( ) client.embeddings._post = AsyncMock(return_value=returned_embedding) - with start_transaction(name="openai tx"): + with sentry_sdk.start_span(name="openai tx"): response = await client.embeddings.create( input="hello", model="text-embedding-3-large" ) @@ -529,6 +536,7 @@ async def test_embeddings_create_async( assert span["data"]["gen_ai.usage.total_tokens"] == 30 +@pytest.mark.forked @pytest.mark.parametrize( "send_default_pii, include_prompts", [(True, True), (True, False), (False, True), (False, False)], @@ -552,10 +560,14 @@ def test_embeddings_create_raises_error( with pytest.raises(OpenAIError): client.embeddings.create(input="hello", model="text-embedding-3-large") - (event,) = events + ( + _, + event, + ) = events assert event["level"] == "error" +@pytest.mark.forked @pytest.mark.asyncio @pytest.mark.parametrize( "send_default_pii, include_prompts", @@ -580,7 +592,10 @@ async def test_embeddings_create_raises_error_async( with pytest.raises(OpenAIError): await client.embeddings.create(input="hello", model="text-embedding-3-large") - (event,) = events + ( + _, + event, + ) = events assert event["level"] == "error" @@ -594,7 +609,7 @@ def 
test_span_origin_nonstreaming_chat(sentry_init, capture_events): client = OpenAI(api_key="z") client.chat.completions._post = mock.Mock(return_value=EXAMPLE_CHAT_COMPLETION) - with start_transaction(name="openai tx"): + with sentry_sdk.start_span(name="openai tx"): client.chat.completions.create( model="some-model", messages=[{"role": "system", "content": "hello"}] ) @@ -616,7 +631,7 @@ async def test_span_origin_nonstreaming_chat_async(sentry_init, capture_events): client = AsyncOpenAI(api_key="z") client.chat.completions._post = AsyncMock(return_value=EXAMPLE_CHAT_COMPLETION) - with start_transaction(name="openai tx"): + with sentry_sdk.start_span(name="openai tx"): await client.chat.completions.create( model="some-model", messages=[{"role": "system", "content": "hello"}] ) @@ -673,7 +688,7 @@ def test_span_origin_streaming_chat(sentry_init, capture_events): ] client.chat.completions._post = mock.Mock(return_value=returned_stream) - with start_transaction(name="openai tx"): + with sentry_sdk.start_span(name="openai tx"): response_stream = client.chat.completions.create( model="some-model", messages=[{"role": "system", "content": "hello"}] ) @@ -737,7 +752,7 @@ async def test_span_origin_streaming_chat_async(sentry_init, capture_events): ) client.chat.completions._post = AsyncMock(return_value=returned_stream) - with start_transaction(name="openai tx"): + with sentry_sdk.start_span(name="openai tx"): response_stream = await client.chat.completions.create( model="some-model", messages=[{"role": "system", "content": "hello"}] ) @@ -772,7 +787,7 @@ def test_span_origin_embeddings(sentry_init, capture_events): ) client.embeddings._post = mock.Mock(return_value=returned_embedding) - with start_transaction(name="openai tx"): + with sentry_sdk.start_span(name="openai tx"): client.embeddings.create(input="hello", model="text-embedding-3-large") (event,) = events @@ -802,7 +817,7 @@ async def test_span_origin_embeddings_async(sentry_init, capture_events): ) client.embeddings._post = AsyncMock(return_value=returned_embedding) - with start_transaction(name="openai tx"): + with sentry_sdk.start_span(name="openai tx"): await client.embeddings.create(input="hello", model="text-embedding-3-large") (event,) = events @@ -978,7 +993,7 @@ def test_ai_client_span_responses_api_no_pii(sentry_init, capture_events): client = OpenAI(api_key="z") client.responses._post = mock.Mock(return_value=EXAMPLE_RESPONSE) - with start_transaction(name="openai tx"): + with sentry_sdk.start_span(name="openai tx"): client.responses.create( model="gpt-4o", instructions="You are a coding assistant that talks like a pirate.", @@ -1001,6 +1016,10 @@ def test_ai_client_span_responses_api_no_pii(sentry_init, capture_events): "gen_ai.usage.output_tokens": 10, "gen_ai.usage.output_tokens.reasoning": 8, "gen_ai.usage.total_tokens": 30, + "sentry.name": "responses gpt-4o", + "sentry.op": "gen_ai.responses", + "sentry.origin": "auto.ai.openai", + "sentry.source": "custom", "thread.id": mock.ANY, "thread.name": mock.ANY, } @@ -1021,7 +1040,7 @@ def test_ai_client_span_responses_api(sentry_init, capture_events): client = OpenAI(api_key="z") client.responses._post = mock.Mock(return_value=EXAMPLE_RESPONSE) - with start_transaction(name="openai tx"): + with sentry_sdk.start_span(name="openai tx"): client.responses.create( model="gpt-4o", instructions="You are a coding assistant that talks like a pirate.", @@ -1046,6 +1065,10 @@ def test_ai_client_span_responses_api(sentry_init, capture_events): "gen_ai.usage.output_tokens.reasoning": 8, 
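# Note on the four "sentry.*" keys asserted in these span-data dicts: they are
# newly expected by this patch, exposing the span's name, op, origin and
# transaction source as plain data attributes next to the gen_ai.* keys:
#   "sentry.name":   "responses gpt-4o",
#   "sentry.op":     "gen_ai.responses",
#   "sentry.origin": "auto.ai.openai",
#   "sentry.source": "custom",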
"gen_ai.usage.total_tokens": 30, "gen_ai.response.text": '[{"id": "message-id", "content": [{"annotations": [], "text": "the model response", "type": "output_text"}], "role": "assistant", "status": "completed", "type": "message"}]', + "sentry.name": "responses gpt-4o", + "sentry.op": "gen_ai.responses", + "sentry.origin": "auto.ai.openai", + "sentry.source": "custom", "thread.id": mock.ANY, "thread.name": mock.ANY, } @@ -1065,7 +1088,7 @@ def test_error_in_responses_api(sentry_init, capture_events): side_effect=OpenAIError("API rate limit reached") ) - with start_transaction(name="openai tx"): + with sentry_sdk.start_span(name="openai tx"): with pytest.raises(OpenAIError): client.responses.create( model="gpt-4o", @@ -1101,7 +1124,7 @@ async def test_ai_client_span_responses_async_api(sentry_init, capture_events): client = AsyncOpenAI(api_key="z") client.responses._post = AsyncMock(return_value=EXAMPLE_RESPONSE) - with start_transaction(name="openai tx"): + with sentry_sdk.start_span(name="openai tx"): await client.responses.create( model="gpt-4o", instructions="You are a coding assistant that talks like a pirate.", @@ -1126,6 +1149,10 @@ async def test_ai_client_span_responses_async_api(sentry_init, capture_events): "gen_ai.usage.output_tokens.reasoning": 8, "gen_ai.usage.total_tokens": 30, "gen_ai.response.text": '[{"id": "message-id", "content": [{"annotations": [], "text": "the model response", "type": "output_text"}], "role": "assistant", "status": "completed", "type": "message"}]', + "sentry.name": "responses gpt-4o", + "sentry.op": "gen_ai.responses", + "sentry.origin": "auto.ai.openai", + "sentry.source": "custom", "thread.id": mock.ANY, "thread.name": mock.ANY, } @@ -1146,7 +1173,7 @@ async def test_ai_client_span_streaming_responses_async_api( client = AsyncOpenAI(api_key="z") client.responses._post = AsyncMock(return_value=EXAMPLE_RESPONSE) - with start_transaction(name="openai tx"): + with sentry_sdk.start_span(name="openai tx"): await client.responses.create( model="gpt-4o", instructions="You are a coding assistant that talks like a pirate.", @@ -1173,6 +1200,10 @@ async def test_ai_client_span_streaming_responses_async_api( "gen_ai.usage.output_tokens.reasoning": 8, "gen_ai.usage.total_tokens": 30, "gen_ai.response.text": '[{"id": "message-id", "content": [{"annotations": [], "text": "the model response", "type": "output_text"}], "role": "assistant", "status": "completed", "type": "message"}]', + "sentry.name": "responses gpt-4o", + "sentry.op": "gen_ai.responses", + "sentry.origin": "auto.ai.openai", + "sentry.source": "custom", "thread.id": mock.ANY, "thread.name": mock.ANY, } @@ -1193,7 +1224,7 @@ async def test_error_in_responses_async_api(sentry_init, capture_events): side_effect=OpenAIError("API rate limit reached") ) - with start_transaction(name="openai tx"): + with sentry_sdk.start_span(name="openai tx"): with pytest.raises(OpenAIError): await client.responses.create( model="gpt-4o", @@ -1313,7 +1344,7 @@ def test_streaming_responses_api( returned_stream._iterator = EXAMPLE_RESPONSES_STREAM client.responses._post = mock.Mock(return_value=returned_stream) - with start_transaction(name="openai tx"): + with sentry_sdk.start_span(name="openai tx"): response_stream = client.responses.create( model="some-model", input="hello", @@ -1368,7 +1399,7 @@ async def test_streaming_responses_api_async( returned_stream._iterator = async_iterator(EXAMPLE_RESPONSES_STREAM) client.responses._post = AsyncMock(return_value=returned_stream) - with start_transaction(name="openai tx"): + 
with sentry_sdk.start_span(name="openai tx"): response_stream = await client.responses.create( model="some-model", input="hello", @@ -1416,7 +1447,7 @@ def test_empty_tools_in_chat_completion(sentry_init, capture_events, tools): client = OpenAI(api_key="z") client.chat.completions._post = mock.Mock(return_value=EXAMPLE_CHAT_COMPLETION) - with start_transaction(name="openai tx"): + with sentry_sdk.start_span(name="openai tx"): client.chat.completions.create( model="some-model", messages=[{"role": "system", "content": "hello"}], diff --git a/tests/integrations/openai_agents/test_openai_agents.py b/tests/integrations/openai_agents/test_openai_agents.py index 3f64e5c45c..2f1ff686bd 100644 --- a/tests/integrations/openai_agents/test_openai_agents.py +++ b/tests/integrations/openai_agents/test_openai_agents.py @@ -322,10 +322,10 @@ async def test_handoff_span(sentry_init, capture_events, mock_usage): (transaction,) = events spans = transaction["spans"] - handoff_span = spans[2] - # Verify handoff span was created - assert handoff_span is not None + # There should be exactly one handoff span + (handoff_span,) = [span for span in spans if span["op"] == "gen_ai.handoff"] + assert ( handoff_span["description"] == "handoff from primary_agent to secondary_agent" ) @@ -413,12 +413,25 @@ def simple_test_tool(message: str) -> str: (transaction,) = events spans = transaction["spans"] - ( - agent_span, - ai_client_span1, - tool_span, - ai_client_span2, - ) = spans + + assert len(spans) == 4 + + # Find each span by its characteristics + agent_span = next(s for s in spans if s["description"] == "invoke_agent test_agent") + tool_span = next( + s for s in spans if s["description"] == "execute_tool simple_test_tool" + ) + ai_client_span1 = next( + s + for s in spans + if s["description"] == "chat gpt-4" + and "gen_ai.response.tool_calls" in s["data"] + ) + ai_client_span2 = next( + s + for s in spans + if s["description"] == "chat gpt-4" and "gen_ai.response.text" in s["data"] + ) available_tools = safe_serialize( [ @@ -636,4 +649,4 @@ async def test_error_handling(sentry_init, capture_events, test_agent): assert ai_client_span["description"] == "chat gpt-4" assert ai_client_span["origin"] == "auto.ai.openai_agents" - assert ai_client_span["tags"]["status"] == "internal_error" + assert ai_client_span["status"] == "internal_error" diff --git a/tests/integrations/opentelemetry/test_experimental.py b/tests/integrations/opentelemetry/test_experimental.py deleted file mode 100644 index 8e4b703361..0000000000 --- a/tests/integrations/opentelemetry/test_experimental.py +++ /dev/null @@ -1,47 +0,0 @@ -from unittest.mock import MagicMock, patch - -import pytest - - -@pytest.mark.forked -def test_integration_enabled_if_option_is_on(sentry_init, reset_integrations): - mocked_setup_once = MagicMock() - - with patch( - "sentry_sdk.integrations.opentelemetry.integration.OpenTelemetryIntegration.setup_once", - mocked_setup_once, - ): - sentry_init( - _experiments={ - "otel_powered_performance": True, - }, - ) - mocked_setup_once.assert_called_once() - - -@pytest.mark.forked -def test_integration_not_enabled_if_option_is_off(sentry_init, reset_integrations): - mocked_setup_once = MagicMock() - - with patch( - "sentry_sdk.integrations.opentelemetry.integration.OpenTelemetryIntegration.setup_once", - mocked_setup_once, - ): - sentry_init( - _experiments={ - "otel_powered_performance": False, - }, - ) - mocked_setup_once.assert_not_called() - - -@pytest.mark.forked -def 
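# The openai_agents tests above stop indexing spans positionally and select
# them by op or description instead, which is robust to ordering changes.
# The pattern, with fixture names as used in those tests:
spans = transaction["spans"]
(handoff_span,) = [s for s in spans if s["op"] == "gen_ai.handoff"]
tool_span = next(
    s for s in spans if s["description"] == "execute_tool simple_test_tool"
)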
test_integration_not_enabled_if_option_is_missing(sentry_init, reset_integrations): - mocked_setup_once = MagicMock() - - with patch( - "sentry_sdk.integrations.opentelemetry.integration.OpenTelemetryIntegration.setup_once", - mocked_setup_once, - ): - sentry_init() - mocked_setup_once.assert_not_called() diff --git a/tests/integrations/opentelemetry/test_propagator.py b/tests/integrations/opentelemetry/test_propagator.py deleted file mode 100644 index d999b0bb2b..0000000000 --- a/tests/integrations/opentelemetry/test_propagator.py +++ /dev/null @@ -1,300 +0,0 @@ -import pytest - -from unittest import mock -from unittest.mock import MagicMock - -from opentelemetry.context import get_current -from opentelemetry.trace import ( - SpanContext, - TraceFlags, - set_span_in_context, -) -from opentelemetry.trace.propagation import get_current_span - -from sentry_sdk.integrations.opentelemetry.consts import ( - SENTRY_BAGGAGE_KEY, - SENTRY_TRACE_KEY, -) -from sentry_sdk.integrations.opentelemetry.propagator import SentryPropagator -from sentry_sdk.integrations.opentelemetry.span_processor import SentrySpanProcessor -from sentry_sdk.tracing_utils import Baggage - - -@pytest.mark.forked -def test_extract_no_context_no_sentry_trace_header(): - """ - No context and NO Sentry trace data in getter. - Extract should return empty context. - """ - carrier = None - context = None - getter = MagicMock() - getter.get.return_value = None - - modified_context = SentryPropagator().extract(carrier, context, getter) - - assert modified_context == {} - - -@pytest.mark.forked -def test_extract_context_no_sentry_trace_header(): - """ - Context but NO Sentry trace data in getter. - Extract should return context as is. - """ - carrier = None - context = {"some": "value"} - getter = MagicMock() - getter.get.return_value = None - - modified_context = SentryPropagator().extract(carrier, context, getter) - - assert modified_context == context - - -@pytest.mark.forked -def test_extract_empty_context_sentry_trace_header_no_baggage(): - """ - Empty context but Sentry trace data but NO Baggage in getter. - Extract should return context that has empty baggage in it and also a NoopSpan with span_id and trace_id. - """ - carrier = None - context = {} - getter = MagicMock() - getter.get.side_effect = [ - ["1234567890abcdef1234567890abcdef-1234567890abcdef-1"], - None, - ] - - modified_context = SentryPropagator().extract(carrier, context, getter) - - assert len(modified_context.keys()) == 3 - - assert modified_context[SENTRY_TRACE_KEY] == { - "trace_id": "1234567890abcdef1234567890abcdef", - "parent_span_id": "1234567890abcdef", - "parent_sampled": True, - } - assert modified_context[SENTRY_BAGGAGE_KEY].serialize() == "" - - span_context = get_current_span(modified_context).get_span_context() - assert span_context.span_id == int("1234567890abcdef", 16) - assert span_context.trace_id == int("1234567890abcdef1234567890abcdef", 16) - - -@pytest.mark.forked -def test_extract_context_sentry_trace_header_baggage(): - """ - Empty context but Sentry trace data and Baggage in getter. - Extract should return context that has baggage in it and also a NoopSpan with span_id and trace_id. 
- """ - baggage_header = ( - "other-vendor-value-1=foo;bar;baz, sentry-trace_id=771a43a4192642f0b136d5159a501700, " - "sentry-public_key=49d0f7386ad645858ae85020e393bef3, sentry-sample_rate=0.01337, " - "sentry-user_id=Am%C3%A9lie, other-vendor-value-2=foo;bar;" - ) - - carrier = None - context = {"some": "value"} - getter = MagicMock() - getter.get.side_effect = [ - ["1234567890abcdef1234567890abcdef-1234567890abcdef-1"], - [baggage_header], - ] - - modified_context = SentryPropagator().extract(carrier, context, getter) - - assert len(modified_context.keys()) == 4 - - assert modified_context[SENTRY_TRACE_KEY] == { - "trace_id": "1234567890abcdef1234567890abcdef", - "parent_span_id": "1234567890abcdef", - "parent_sampled": True, - } - - assert modified_context[SENTRY_BAGGAGE_KEY].serialize() == ( - "sentry-trace_id=771a43a4192642f0b136d5159a501700," - "sentry-public_key=49d0f7386ad645858ae85020e393bef3," - "sentry-sample_rate=0.01337,sentry-user_id=Am%C3%A9lie" - ) - - span_context = get_current_span(modified_context).get_span_context() - assert span_context.span_id == int("1234567890abcdef", 16) - assert span_context.trace_id == int("1234567890abcdef1234567890abcdef", 16) - - -@pytest.mark.forked -def test_inject_empty_otel_span_map(): - """ - Empty otel_span_map. - So there is no sentry_span to be found in inject() - and the function is returned early and no setters are called. - """ - carrier = None - context = get_current() - setter = MagicMock() - setter.set = MagicMock() - - span_context = SpanContext( - trace_id=int("1234567890abcdef1234567890abcdef", 16), - span_id=int("1234567890abcdef", 16), - trace_flags=TraceFlags(TraceFlags.SAMPLED), - is_remote=True, - ) - span = MagicMock() - span.get_span_context.return_value = span_context - - with mock.patch( - "sentry_sdk.integrations.opentelemetry.propagator.trace.get_current_span", - return_value=span, - ): - full_context = set_span_in_context(span, context) - SentryPropagator().inject(carrier, full_context, setter) - - setter.set.assert_not_called() - - -@pytest.mark.forked -def test_inject_sentry_span_no_baggage(): - """ - Inject a sentry span with no baggage. - """ - carrier = None - context = get_current() - setter = MagicMock() - setter.set = MagicMock() - - trace_id = "1234567890abcdef1234567890abcdef" - span_id = "1234567890abcdef" - - span_context = SpanContext( - trace_id=int(trace_id, 16), - span_id=int(span_id, 16), - trace_flags=TraceFlags(TraceFlags.SAMPLED), - is_remote=True, - ) - span = MagicMock() - span.get_span_context.return_value = span_context - - sentry_span = MagicMock() - sentry_span.to_traceparent = mock.Mock( - return_value="1234567890abcdef1234567890abcdef-1234567890abcdef-1" - ) - sentry_span.containing_transaction.get_baggage = mock.Mock(return_value=None) - - span_processor = SentrySpanProcessor() - span_processor.otel_span_map[span_id] = sentry_span - - with mock.patch( - "sentry_sdk.integrations.opentelemetry.propagator.trace.get_current_span", - return_value=span, - ): - full_context = set_span_in_context(span, context) - SentryPropagator().inject(carrier, full_context, setter) - - setter.set.assert_called_once_with( - carrier, - "sentry-trace", - "1234567890abcdef1234567890abcdef-1234567890abcdef-1", - ) - - -def test_inject_sentry_span_empty_baggage(): - """ - Inject a sentry span with no baggage. 
- """ - carrier = None - context = get_current() - setter = MagicMock() - setter.set = MagicMock() - - trace_id = "1234567890abcdef1234567890abcdef" - span_id = "1234567890abcdef" - - span_context = SpanContext( - trace_id=int(trace_id, 16), - span_id=int(span_id, 16), - trace_flags=TraceFlags(TraceFlags.SAMPLED), - is_remote=True, - ) - span = MagicMock() - span.get_span_context.return_value = span_context - - sentry_span = MagicMock() - sentry_span.to_traceparent = mock.Mock( - return_value="1234567890abcdef1234567890abcdef-1234567890abcdef-1" - ) - sentry_span.containing_transaction.get_baggage = mock.Mock(return_value=Baggage({})) - - span_processor = SentrySpanProcessor() - span_processor.otel_span_map[span_id] = sentry_span - - with mock.patch( - "sentry_sdk.integrations.opentelemetry.propagator.trace.get_current_span", - return_value=span, - ): - full_context = set_span_in_context(span, context) - SentryPropagator().inject(carrier, full_context, setter) - - setter.set.assert_called_once_with( - carrier, - "sentry-trace", - "1234567890abcdef1234567890abcdef-1234567890abcdef-1", - ) - - -def test_inject_sentry_span_baggage(): - """ - Inject a sentry span with baggage. - """ - carrier = None - context = get_current() - setter = MagicMock() - setter.set = MagicMock() - - trace_id = "1234567890abcdef1234567890abcdef" - span_id = "1234567890abcdef" - - span_context = SpanContext( - trace_id=int(trace_id, 16), - span_id=int(span_id, 16), - trace_flags=TraceFlags(TraceFlags.SAMPLED), - is_remote=True, - ) - span = MagicMock() - span.get_span_context.return_value = span_context - - sentry_span = MagicMock() - sentry_span.to_traceparent = mock.Mock( - return_value="1234567890abcdef1234567890abcdef-1234567890abcdef-1" - ) - sentry_items = { - "sentry-trace_id": "771a43a4192642f0b136d5159a501700", - "sentry-public_key": "49d0f7386ad645858ae85020e393bef3", - "sentry-sample_rate": 0.01337, - "sentry-user_id": "Amélie", - } - baggage = Baggage(sentry_items=sentry_items) - sentry_span.containing_transaction.get_baggage = MagicMock(return_value=baggage) - - span_processor = SentrySpanProcessor() - span_processor.otel_span_map[span_id] = sentry_span - - with mock.patch( - "sentry_sdk.integrations.opentelemetry.propagator.trace.get_current_span", - return_value=span, - ): - full_context = set_span_in_context(span, context) - SentryPropagator().inject(carrier, full_context, setter) - - setter.set.assert_any_call( - carrier, - "sentry-trace", - "1234567890abcdef1234567890abcdef-1234567890abcdef-1", - ) - - setter.set.assert_any_call( - carrier, - "baggage", - baggage.serialize(), - ) diff --git a/tests/integrations/opentelemetry/test_span_processor.py b/tests/integrations/opentelemetry/test_span_processor.py deleted file mode 100644 index ec5cf6af23..0000000000 --- a/tests/integrations/opentelemetry/test_span_processor.py +++ /dev/null @@ -1,608 +0,0 @@ -import time -from datetime import datetime, timezone -from unittest import mock -from unittest.mock import MagicMock - -import pytest -from opentelemetry.trace import SpanKind, SpanContext, Status, StatusCode - -import sentry_sdk -from sentry_sdk.integrations.opentelemetry.span_processor import ( - SentrySpanProcessor, - link_trace_context_to_error_event, -) -from sentry_sdk.tracing import Span, Transaction -from sentry_sdk.tracing_utils import extract_sentrytrace_data - - -def test_is_sentry_span(): - otel_span = MagicMock() - - span_processor = SentrySpanProcessor() - assert not span_processor._is_sentry_span(otel_span) - - client = MagicMock() - 
client.options = {"instrumenter": "otel"} - client.dsn = "https://1234567890abcdef@o123456.ingest.sentry.io/123456" - sentry_sdk.get_global_scope().set_client(client) - - assert not span_processor._is_sentry_span(otel_span) - - otel_span.attributes = { - "http.url": "https://example.com", - } - assert not span_processor._is_sentry_span(otel_span) - - otel_span.attributes = { - "http.url": "https://o123456.ingest.sentry.io/api/123/envelope", - } - assert span_processor._is_sentry_span(otel_span) - - -def test_get_otel_context(): - otel_span = MagicMock() - otel_span.attributes = {"foo": "bar"} - otel_span.resource = MagicMock() - otel_span.resource.attributes = {"baz": "qux"} - - span_processor = SentrySpanProcessor() - otel_context = span_processor._get_otel_context(otel_span) - - assert otel_context == { - "attributes": {"foo": "bar"}, - "resource": {"baz": "qux"}, - } - - -def test_get_trace_data_with_span_and_trace(): - otel_span = MagicMock() - span_context = SpanContext( - trace_id=int("1234567890abcdef1234567890abcdef", 16), - span_id=int("1234567890abcdef", 16), - is_remote=True, - ) - otel_span.get_span_context.return_value = span_context - otel_span.parent = None - - parent_context = {} - - span_processor = SentrySpanProcessor() - sentry_trace_data = span_processor._get_trace_data(otel_span, parent_context) - assert sentry_trace_data["trace_id"] == "1234567890abcdef1234567890abcdef" - assert sentry_trace_data["span_id"] == "1234567890abcdef" - assert sentry_trace_data["parent_span_id"] is None - assert sentry_trace_data["parent_sampled"] is None - assert sentry_trace_data["baggage"] is None - - -def test_get_trace_data_with_span_and_trace_and_parent(): - otel_span = MagicMock() - span_context = SpanContext( - trace_id=int("1234567890abcdef1234567890abcdef", 16), - span_id=int("1234567890abcdef", 16), - is_remote=True, - ) - otel_span.get_span_context.return_value = span_context - otel_span.parent = MagicMock() - otel_span.parent.span_id = int("abcdef1234567890", 16) - - parent_context = {} - - span_processor = SentrySpanProcessor() - sentry_trace_data = span_processor._get_trace_data(otel_span, parent_context) - assert sentry_trace_data["trace_id"] == "1234567890abcdef1234567890abcdef" - assert sentry_trace_data["span_id"] == "1234567890abcdef" - assert sentry_trace_data["parent_span_id"] == "abcdef1234567890" - assert sentry_trace_data["parent_sampled"] is None - assert sentry_trace_data["baggage"] is None - - -def test_get_trace_data_with_sentry_trace(): - otel_span = MagicMock() - span_context = SpanContext( - trace_id=int("1234567890abcdef1234567890abcdef", 16), - span_id=int("1234567890abcdef", 16), - is_remote=True, - ) - otel_span.get_span_context.return_value = span_context - otel_span.parent = MagicMock() - otel_span.parent.span_id = int("abcdef1234567890", 16) - - parent_context = {} - - with mock.patch( - "sentry_sdk.integrations.opentelemetry.span_processor.get_value", - side_effect=[ - extract_sentrytrace_data( - "1234567890abcdef1234567890abcdef-1234567890abcdef-1" - ), - None, - ], - ): - span_processor = SentrySpanProcessor() - sentry_trace_data = span_processor._get_trace_data(otel_span, parent_context) - assert sentry_trace_data["trace_id"] == "1234567890abcdef1234567890abcdef" - assert sentry_trace_data["span_id"] == "1234567890abcdef" - assert sentry_trace_data["parent_span_id"] == "abcdef1234567890" - assert sentry_trace_data["parent_sampled"] is True - assert sentry_trace_data["baggage"] is None - - with mock.patch( - 
"sentry_sdk.integrations.opentelemetry.span_processor.get_value", - side_effect=[ - extract_sentrytrace_data( - "1234567890abcdef1234567890abcdef-1234567890abcdef-0" - ), - None, - ], - ): - span_processor = SentrySpanProcessor() - sentry_trace_data = span_processor._get_trace_data(otel_span, parent_context) - assert sentry_trace_data["trace_id"] == "1234567890abcdef1234567890abcdef" - assert sentry_trace_data["span_id"] == "1234567890abcdef" - assert sentry_trace_data["parent_span_id"] == "abcdef1234567890" - assert sentry_trace_data["parent_sampled"] is False - assert sentry_trace_data["baggage"] is None - - -def test_get_trace_data_with_sentry_trace_and_baggage(): - otel_span = MagicMock() - span_context = SpanContext( - trace_id=int("1234567890abcdef1234567890abcdef", 16), - span_id=int("1234567890abcdef", 16), - is_remote=True, - ) - otel_span.get_span_context.return_value = span_context - otel_span.parent = MagicMock() - otel_span.parent.span_id = int("abcdef1234567890", 16) - - parent_context = {} - - baggage = ( - "sentry-trace_id=771a43a4192642f0b136d5159a501700," - "sentry-public_key=49d0f7386ad645858ae85020e393bef3," - "sentry-sample_rate=0.01337,sentry-user_id=Am%C3%A9lie" - ) - - with mock.patch( - "sentry_sdk.integrations.opentelemetry.span_processor.get_value", - side_effect=[ - extract_sentrytrace_data( - "1234567890abcdef1234567890abcdef-1234567890abcdef-1" - ), - baggage, - ], - ): - span_processor = SentrySpanProcessor() - sentry_trace_data = span_processor._get_trace_data(otel_span, parent_context) - assert sentry_trace_data["trace_id"] == "1234567890abcdef1234567890abcdef" - assert sentry_trace_data["span_id"] == "1234567890abcdef" - assert sentry_trace_data["parent_span_id"] == "abcdef1234567890" - assert sentry_trace_data["parent_sampled"] - assert sentry_trace_data["baggage"] == baggage - - -def test_update_span_with_otel_data_http_method(): - sentry_span = Span() - - otel_span = MagicMock() - otel_span.name = "Test OTel Span" - otel_span.kind = SpanKind.CLIENT - otel_span.attributes = { - "http.method": "GET", - "http.status_code": 429, - "http.status_text": "xxx", - "http.user_agent": "curl/7.64.1", - "net.peer.name": "example.com", - "http.target": "/", - } - - span_processor = SentrySpanProcessor() - span_processor._update_span_with_otel_data(sentry_span, otel_span) - - assert sentry_span.op == "http.client" - assert sentry_span.description == "GET example.com /" - assert sentry_span.status == "resource_exhausted" - - assert sentry_span._data["http.method"] == "GET" - assert sentry_span._data["http.response.status_code"] == 429 - assert sentry_span._data["http.status_text"] == "xxx" - assert sentry_span._data["http.user_agent"] == "curl/7.64.1" - assert sentry_span._data["net.peer.name"] == "example.com" - assert sentry_span._data["http.target"] == "/" - - -@pytest.mark.parametrize( - "otel_status, expected_status", - [ - pytest.param(Status(StatusCode.UNSET), None, id="unset"), - pytest.param(Status(StatusCode.OK), "ok", id="ok"), - pytest.param(Status(StatusCode.ERROR), "internal_error", id="error"), - ], -) -def test_update_span_with_otel_status(otel_status, expected_status): - sentry_span = Span() - - otel_span = MagicMock() - otel_span.name = "Test OTel Span" - otel_span.kind = SpanKind.INTERNAL - otel_span.status = otel_status - - span_processor = SentrySpanProcessor() - span_processor._update_span_with_otel_status(sentry_span, otel_span) - - assert sentry_span.get_trace_context().get("status") == expected_status - - -def 
test_update_span_with_otel_data_http_method2(): - sentry_span = Span() - - otel_span = MagicMock() - otel_span.name = "Test OTel Span" - otel_span.kind = SpanKind.SERVER - otel_span.attributes = { - "http.method": "GET", - "http.status_code": 429, - "http.status_text": "xxx", - "http.user_agent": "curl/7.64.1", - "http.url": "https://example.com/status/403?password=123&username=test@example.com&author=User123&auth=1234567890abcdef", - } - - span_processor = SentrySpanProcessor() - span_processor._update_span_with_otel_data(sentry_span, otel_span) - - assert sentry_span.op == "http.server" - assert sentry_span.description == "GET https://example.com/status/403" - assert sentry_span.status == "resource_exhausted" - - assert sentry_span._data["http.method"] == "GET" - assert sentry_span._data["http.response.status_code"] == 429 - assert sentry_span._data["http.status_text"] == "xxx" - assert sentry_span._data["http.user_agent"] == "curl/7.64.1" - assert ( - sentry_span._data["http.url"] - == "https://example.com/status/403?password=123&username=test@example.com&author=User123&auth=1234567890abcdef" - ) - - -def test_update_span_with_otel_data_db_query(): - sentry_span = Span() - - otel_span = MagicMock() - otel_span.name = "Test OTel Span" - otel_span.attributes = { - "db.system": "postgresql", - "db.statement": "SELECT * FROM table where pwd = '123456'", - } - - span_processor = SentrySpanProcessor() - span_processor._update_span_with_otel_data(sentry_span, otel_span) - - assert sentry_span.op == "db" - assert sentry_span.description == "SELECT * FROM table where pwd = '123456'" - - assert sentry_span._data["db.system"] == "postgresql" - assert ( - sentry_span._data["db.statement"] == "SELECT * FROM table where pwd = '123456'" - ) - - -def test_on_start_transaction(): - otel_span = MagicMock() - otel_span.name = "Sample OTel Span" - otel_span.start_time = time.time_ns() - span_context = SpanContext( - trace_id=int("1234567890abcdef1234567890abcdef", 16), - span_id=int("1234567890abcdef", 16), - is_remote=True, - ) - otel_span.get_span_context.return_value = span_context - otel_span.parent = MagicMock() - otel_span.parent.span_id = int("abcdef1234567890", 16) - - parent_context = {} - - fake_start_transaction = MagicMock() - - fake_client = MagicMock() - fake_client.options = {"instrumenter": "otel"} - fake_client.dsn = "https://1234567890abcdef@o123456.ingest.sentry.io/123456" - sentry_sdk.get_global_scope().set_client(fake_client) - - with mock.patch( - "sentry_sdk.integrations.opentelemetry.span_processor.start_transaction", - fake_start_transaction, - ): - span_processor = SentrySpanProcessor() - span_processor.on_start(otel_span, parent_context) - - fake_start_transaction.assert_called_once_with( - name="Sample OTel Span", - span_id="1234567890abcdef", - parent_span_id="abcdef1234567890", - trace_id="1234567890abcdef1234567890abcdef", - baggage=None, - start_timestamp=datetime.fromtimestamp( - otel_span.start_time / 1e9, timezone.utc - ), - instrumenter="otel", - origin="auto.otel", - ) - - assert len(span_processor.otel_span_map.keys()) == 1 - assert list(span_processor.otel_span_map.keys())[0] == "1234567890abcdef" - - -def test_on_start_child(): - otel_span = MagicMock() - otel_span.name = "Sample OTel Span" - otel_span.start_time = time.time_ns() - span_context = SpanContext( - trace_id=int("1234567890abcdef1234567890abcdef", 16), - span_id=int("1234567890abcdef", 16), - is_remote=True, - ) - otel_span.get_span_context.return_value = span_context - otel_span.parent = MagicMock() - 
otel_span.parent.span_id = int("abcdef1234567890", 16) - - parent_context = {} - - fake_client = MagicMock() - fake_client.options = {"instrumenter": "otel"} - fake_client.dsn = "https://1234567890abcdef@o123456.ingest.sentry.io/123456" - sentry_sdk.get_global_scope().set_client(fake_client) - - fake_span = MagicMock() - - span_processor = SentrySpanProcessor() - span_processor.otel_span_map["abcdef1234567890"] = fake_span - span_processor.on_start(otel_span, parent_context) - - fake_span.start_child.assert_called_once_with( - span_id="1234567890abcdef", - name="Sample OTel Span", - start_timestamp=datetime.fromtimestamp( - otel_span.start_time / 1e9, timezone.utc - ), - instrumenter="otel", - origin="auto.otel", - ) - - assert len(span_processor.otel_span_map.keys()) == 2 - assert "abcdef1234567890" in span_processor.otel_span_map.keys() - assert "1234567890abcdef" in span_processor.otel_span_map.keys() - - -def test_on_end_no_sentry_span(): - """ - If on_end is called on a span that is not in the otel_span_map, it should be a no-op. - """ - otel_span = MagicMock() - otel_span.name = "Sample OTel Span" - otel_span.end_time = time.time_ns() - span_context = SpanContext( - trace_id=int("1234567890abcdef1234567890abcdef", 16), - span_id=int("1234567890abcdef", 16), - is_remote=True, - ) - otel_span.get_span_context.return_value = span_context - - span_processor = SentrySpanProcessor() - span_processor.otel_span_map = {} - span_processor._get_otel_context = MagicMock() - span_processor._update_span_with_otel_data = MagicMock() - - span_processor.on_end(otel_span) - - span_processor._get_otel_context.assert_not_called() - span_processor._update_span_with_otel_data.assert_not_called() - - -def test_on_end_sentry_transaction(): - """ - Test on_end for a sentry Transaction. - """ - otel_span = MagicMock() - otel_span.name = "Sample OTel Span" - otel_span.end_time = time.time_ns() - otel_span.status = Status(StatusCode.OK) - span_context = SpanContext( - trace_id=int("1234567890abcdef1234567890abcdef", 16), - span_id=int("1234567890abcdef", 16), - is_remote=True, - ) - otel_span.get_span_context.return_value = span_context - - fake_client = MagicMock() - fake_client.options = {"instrumenter": "otel"} - sentry_sdk.get_global_scope().set_client(fake_client) - - fake_sentry_span = MagicMock(spec=Transaction) - fake_sentry_span.set_context = MagicMock() - fake_sentry_span.finish = MagicMock() - - span_processor = SentrySpanProcessor() - span_processor._get_otel_context = MagicMock() - span_processor._update_span_with_otel_data = MagicMock() - span_processor.otel_span_map["1234567890abcdef"] = fake_sentry_span - - span_processor.on_end(otel_span) - - fake_sentry_span.set_context.assert_called_once() - span_processor._update_span_with_otel_data.assert_not_called() - fake_sentry_span.set_status.assert_called_once_with("ok") - fake_sentry_span.finish.assert_called_once() - - -def test_on_end_sentry_span(): - """ - Test on_end for a sentry Span. 
- """ - otel_span = MagicMock() - otel_span.name = "Sample OTel Span" - otel_span.end_time = time.time_ns() - otel_span.status = Status(StatusCode.OK) - span_context = SpanContext( - trace_id=int("1234567890abcdef1234567890abcdef", 16), - span_id=int("1234567890abcdef", 16), - is_remote=True, - ) - otel_span.get_span_context.return_value = span_context - - fake_client = MagicMock() - fake_client.options = {"instrumenter": "otel"} - sentry_sdk.get_global_scope().set_client(fake_client) - - fake_sentry_span = MagicMock(spec=Span) - fake_sentry_span.set_context = MagicMock() - fake_sentry_span.finish = MagicMock() - - span_processor = SentrySpanProcessor() - span_processor._get_otel_context = MagicMock() - span_processor._update_span_with_otel_data = MagicMock() - span_processor.otel_span_map["1234567890abcdef"] = fake_sentry_span - - span_processor.on_end(otel_span) - - fake_sentry_span.set_context.assert_not_called() - span_processor._update_span_with_otel_data.assert_called_once_with( - fake_sentry_span, otel_span - ) - fake_sentry_span.set_status.assert_called_once_with("ok") - fake_sentry_span.finish.assert_called_once() - - -def test_link_trace_context_to_error_event(): - """ - Test that the trace context is added to the error event. - """ - fake_client = MagicMock() - fake_client.options = {"instrumenter": "otel"} - sentry_sdk.get_global_scope().set_client(fake_client) - - span_id = "1234567890abcdef" - trace_id = "1234567890abcdef1234567890abcdef" - - fake_trace_context = { - "bla": "blub", - "foo": "bar", - "baz": 123, - } - - sentry_span = MagicMock() - sentry_span.get_trace_context = MagicMock(return_value=fake_trace_context) - - otel_span_map = { - span_id: sentry_span, - } - - span_context = SpanContext( - trace_id=int(trace_id, 16), - span_id=int(span_id, 16), - is_remote=True, - ) - otel_span = MagicMock() - otel_span.get_span_context = MagicMock(return_value=span_context) - - fake_event = {"event_id": "1234567890abcdef1234567890abcdef"} - - with mock.patch( - "sentry_sdk.integrations.opentelemetry.span_processor.get_current_span", - return_value=otel_span, - ): - event = link_trace_context_to_error_event(fake_event, otel_span_map) - - assert event - assert event == fake_event # the event is changed in place inside the function - assert "contexts" in event - assert "trace" in event["contexts"] - assert event["contexts"]["trace"] == fake_trace_context - - -def test_pruning_old_spans_on_start(): - otel_span = MagicMock() - otel_span.name = "Sample OTel Span" - otel_span.start_time = time.time_ns() - span_context = SpanContext( - trace_id=int("1234567890abcdef1234567890abcdef", 16), - span_id=int("1234567890abcdef", 16), - is_remote=True, - ) - otel_span.get_span_context.return_value = span_context - otel_span.parent = MagicMock() - otel_span.parent.span_id = int("abcdef1234567890", 16) - - parent_context = {} - fake_client = MagicMock() - fake_client.options = {"instrumenter": "otel", "debug": False} - fake_client.dsn = "https://1234567890abcdef@o123456.ingest.sentry.io/123456" - sentry_sdk.get_global_scope().set_client(fake_client) - - span_processor = SentrySpanProcessor() - - span_processor.otel_span_map = { - "111111111abcdef": MagicMock(), # should stay - "2222222222abcdef": MagicMock(), # should go - "3333333333abcdef": MagicMock(), # should go - } - current_time_minutes = int(time.time() / 60) - span_processor.open_spans = { - current_time_minutes - 3: {"111111111abcdef"}, # should stay - current_time_minutes - - 11: {"2222222222abcdef", "3333333333abcdef"}, # should go - 
} - - span_processor.on_start(otel_span, parent_context) - assert sorted(list(span_processor.otel_span_map.keys())) == [ - "111111111abcdef", - "1234567890abcdef", - ] - assert sorted(list(span_processor.open_spans.values())) == [ - {"111111111abcdef"}, - {"1234567890abcdef"}, - ] - - -def test_pruning_old_spans_on_end(): - otel_span = MagicMock() - otel_span.name = "Sample OTel Span" - otel_span.start_time = time.time_ns() - span_context = SpanContext( - trace_id=int("1234567890abcdef1234567890abcdef", 16), - span_id=int("1234567890abcdef", 16), - is_remote=True, - ) - otel_span.get_span_context.return_value = span_context - otel_span.parent = MagicMock() - otel_span.parent.span_id = int("abcdef1234567890", 16) - - fake_client = MagicMock() - fake_client.options = {"instrumenter": "otel"} - sentry_sdk.get_global_scope().set_client(fake_client) - - fake_sentry_span = MagicMock(spec=Span) - fake_sentry_span.set_context = MagicMock() - fake_sentry_span.finish = MagicMock() - - span_processor = SentrySpanProcessor() - span_processor._get_otel_context = MagicMock() - span_processor._update_span_with_otel_data = MagicMock() - - span_processor.otel_span_map = { - "111111111abcdef": MagicMock(), # should stay - "2222222222abcdef": MagicMock(), # should go - "3333333333abcdef": MagicMock(), # should go - "1234567890abcdef": fake_sentry_span, # should go (because it is closed) - } - current_time_minutes = int(time.time() / 60) - span_processor.open_spans = { - current_time_minutes: {"1234567890abcdef"}, # should go (because it is closed) - current_time_minutes - 3: {"111111111abcdef"}, # should stay - current_time_minutes - - 11: {"2222222222abcdef", "3333333333abcdef"}, # should go - } - - span_processor.on_end(otel_span) - assert sorted(list(span_processor.otel_span_map.keys())) == ["111111111abcdef"] - assert sorted(list(span_processor.open_spans.values())) == [{"111111111abcdef"}] diff --git a/tests/integrations/pymongo/test_pymongo.py b/tests/integrations/pymongo/test_pymongo.py index 10f1c9fba9..e5751854b7 100644 --- a/tests/integrations/pymongo/test_pymongo.py +++ b/tests/integrations/pymongo/test_pymongo.py @@ -1,4 +1,6 @@ -from sentry_sdk import capture_message, start_transaction +import re + +import sentry_sdk from sentry_sdk.consts import SPANDATA from sentry_sdk.integrations.pymongo import PyMongoIntegration, _strip_pii @@ -35,7 +37,7 @@ def test_transactions(sentry_init, capture_events, mongo_server, with_pii): connection = MongoClient(mongo_server.uri) - with start_transaction(): + with sentry_sdk.start_span(): list( connection["test_db"]["test_collection"].find({"foobar": 1}) ) # force query execution @@ -49,7 +51,7 @@ def test_transactions(sentry_init, capture_events, mongo_server, with_pii): (event,) = events (find, insert_success, insert_fail) = event["spans"] - common_tags = { + common_data = { "db.name": "test_db", "db.system": "mongodb", "net.peer.name": mongo_server.host, @@ -60,8 +62,7 @@ def test_transactions(sentry_init, capture_events, mongo_server, with_pii): assert span["data"][SPANDATA.DB_NAME] == "test_db" assert span["data"][SPANDATA.SERVER_ADDRESS] == "localhost" assert span["data"][SPANDATA.SERVER_PORT] == mongo_server.port - for field, value in common_tags.items(): - assert span["tags"][field] == value + for field, value in common_data.items(): assert span["data"][field] == value assert find["op"] == "db" @@ -69,22 +70,16 @@ def test_transactions(sentry_init, capture_events, mongo_server, with_pii): assert insert_fail["op"] == "db" assert 
find["data"]["db.operation"] == "find" - assert find["tags"]["db.operation"] == "find" assert insert_success["data"]["db.operation"] == "insert" - assert insert_success["tags"]["db.operation"] == "insert" assert insert_fail["data"]["db.operation"] == "insert" - assert insert_fail["tags"]["db.operation"] == "insert" assert find["description"].startswith('{"find') - assert insert_success["description"].startswith('{"insert') - assert insert_fail["description"].startswith('{"insert') + assert re.match("^{['\"]insert.*", insert_success["description"]) + assert re.match("^{['\"]insert.*", insert_fail["description"]) assert find["data"][SPANDATA.DB_MONGODB_COLLECTION] == "test_collection" - assert find["tags"][SPANDATA.DB_MONGODB_COLLECTION] == "test_collection" assert insert_success["data"][SPANDATA.DB_MONGODB_COLLECTION] == "test_collection" - assert insert_success["tags"][SPANDATA.DB_MONGODB_COLLECTION] == "test_collection" assert insert_fail["data"][SPANDATA.DB_MONGODB_COLLECTION] == "erroneous" - assert insert_fail["tags"][SPANDATA.DB_MONGODB_COLLECTION] == "erroneous" if with_pii: assert "1" in find["description"] assert "2" in insert_success["description"] @@ -99,16 +94,22 @@ def test_transactions(sentry_init, capture_events, mongo_server, with_pii): and "4" not in insert_fail["description"] ) - assert find["tags"]["status"] == "ok" - assert insert_success["tags"]["status"] == "ok" - assert insert_fail["tags"]["status"] == "internal_error" - -@pytest.mark.parametrize("with_pii", [False, True]) -def test_breadcrumbs(sentry_init, capture_events, mongo_server, with_pii): +@pytest.mark.parametrize( + "with_pii,traces_sample_rate", + [ + [False, 0.0], + [False, 1.0], + [True, 0.0], + [True, 1.0], + ], +) +def test_breadcrumbs( + sentry_init, capture_events, mongo_server, with_pii, traces_sample_rate +): sentry_init( integrations=[PyMongoIntegration()], - traces_sample_rate=1.0, + traces_sample_rate=traces_sample_rate, send_default_pii=with_pii, ) events = capture_events() @@ -118,7 +119,7 @@ def test_breadcrumbs(sentry_init, capture_events, mongo_server, with_pii): list( connection["test_db"]["test_collection"].find({"foobar": 1}) ) # force query execution - capture_message("hi") + sentry_sdk.capture_message("hi") (event,) = events (crumb,) = event["breadcrumbs"]["values"] @@ -445,7 +446,7 @@ def test_span_origin(sentry_init, capture_events, mongo_server): connection = MongoClient(mongo_server.uri) - with start_transaction(): + with sentry_sdk.start_span(): list( connection["test_db"]["test_collection"].find({"foobar": 1}) ) # force query execution diff --git a/tests/integrations/ray/test_ray.py b/tests/integrations/ray/test_ray.py index b5bdd473c4..b6cc8b8025 100644 --- a/tests/integrations/ray/test_ray.py +++ b/tests/integrations/ray/test_ray.py @@ -85,42 +85,42 @@ def example_task(): else: example_task = ray.remote(example_task) - with sentry_sdk.start_transaction(op="task", name="ray test transaction"): + with sentry_sdk.start_span(op="test", name="ray client root span"): worker_envelopes = ray.get(example_task.remote()) client_envelope = sentry_sdk.get_client().transport.envelopes[0] - client_transaction = client_envelope.get_transaction_event() - assert client_transaction["transaction"] == "ray test transaction" - assert client_transaction["transaction_info"] == {"source": "custom"} + client_root_span = client_envelope.get_transaction_event() + assert client_root_span["transaction"] == "ray client root span" + assert client_root_span["transaction_info"] == {"source": "custom"} 
worker_envelope = worker_envelopes[0] - worker_transaction = worker_envelope.get_transaction_event() + worker_root_span = worker_envelope.get_transaction_event() assert ( - worker_transaction["transaction"] + worker_root_span["transaction"] == "tests.integrations.ray.test_ray.test_tracing_in_ray_tasks.<locals>.example_task" ) - assert worker_transaction["transaction_info"] == {"source": "task"} + assert worker_root_span["transaction_info"] == {"source": "task"} - (span,) = client_transaction["spans"] + (span,) = client_root_span["spans"] assert span["op"] == "queue.submit.ray" assert span["origin"] == "auto.queue.ray" assert ( span["description"] == "tests.integrations.ray.test_ray.test_tracing_in_ray_tasks.<locals>.example_task" ) - assert span["parent_span_id"] == client_transaction["contexts"]["trace"]["span_id"] - assert span["trace_id"] == client_transaction["contexts"]["trace"]["trace_id"] + assert span["parent_span_id"] == client_root_span["contexts"]["trace"]["span_id"] + assert span["trace_id"] == client_root_span["contexts"]["trace"]["trace_id"] - (span,) = worker_transaction["spans"] + (span,) = worker_root_span["spans"] assert span["op"] == "task" assert span["origin"] == "manual" assert span["description"] == "example task step" - assert span["parent_span_id"] == worker_transaction["contexts"]["trace"]["span_id"] - assert span["trace_id"] == worker_transaction["contexts"]["trace"]["trace_id"] + assert span["parent_span_id"] == worker_root_span["contexts"]["trace"]["span_id"] + assert span["trace_id"] == worker_root_span["contexts"]["trace"]["trace_id"] assert ( - client_transaction["contexts"]["trace"]["trace_id"] - == worker_transaction["contexts"]["trace"]["trace_id"] + client_root_span["contexts"]["trace"]["trace_id"] + == worker_root_span["contexts"]["trace"]["trace_id"] ) @@ -140,7 +140,7 @@ def test_errors_in_ray_tasks(): def example_task(): 1 / 0 - with sentry_sdk.start_transaction(op="task", name="ray test transaction"): + with sentry_sdk.start_span(op="test", name="ray client root span"): with pytest.raises(ZeroDivisionError): future = example_task.remote() ray.get(future) @@ -175,22 +175,26 @@ def __init__(self): self.n = 0 def increment(self): - with sentry_sdk.start_span(op="task", name="example actor execution"): + with sentry_sdk.start_span( + op="test", + name="custom span in actor execution", + only_as_child_span=True, + ): self.n += 1 return sentry_sdk.get_client().transport.envelopes - with sentry_sdk.start_transaction(op="task", name="ray test transaction"): + with sentry_sdk.start_span(op="test", name="ray client root span"): counter = Counter.remote() worker_envelopes = ray.get(counter.increment.remote()) client_envelope = sentry_sdk.get_client().transport.envelopes[0] - client_transaction = client_envelope.get_transaction_event() + client_root_span = client_envelope.get_transaction_event() # Spans for submitting the actor task are not created (actors are not supported yet) - assert client_transaction["spans"] == [] + assert client_root_span["spans"] == [] - # Transaction are not yet created when executing ray actors (actors are not supported yet) + # Root spans are not yet automatically created when executing ray actors (actors are not supported yet) assert worker_envelopes == [] @@ -212,12 +216,16 @@ def __init__(self): self.n = 0 def increment(self): - with sentry_sdk.start_span(op="task", name="example actor execution"): + with sentry_sdk.start_span( + op="test", + name="custom span in actor execution", + only_as_child_span=True, + ): 1 / 0 return
sentry_sdk.get_client().transport.envelopes - with sentry_sdk.start_transaction(op="task", name="ray test transaction"): + with sentry_sdk.start_span(op="test", name="ray client root span"): with pytest.raises(ZeroDivisionError): counter = Counter.remote() future = counter.increment.remote() diff --git a/tests/integrations/redis/asyncio/test_redis_asyncio.py b/tests/integrations/redis/asyncio/test_redis_asyncio.py index 17130b337b..b24deda6f4 100644 --- a/tests/integrations/redis/asyncio/test_redis_asyncio.py +++ b/tests/integrations/redis/asyncio/test_redis_asyncio.py @@ -1,6 +1,6 @@ import pytest -from sentry_sdk import capture_message, start_transaction +import sentry_sdk from sentry_sdk.consts import SPANDATA from sentry_sdk.integrations.redis import RedisIntegration from tests.conftest import ApproxDict @@ -16,7 +16,7 @@ async def test_async_basic(sentry_init, capture_events): connection = FakeRedis() await connection.get("foobar") - capture_message("hi") + sentry_sdk.capture_message("hi") (event,) = events (crumb,) = event["breadcrumbs"]["values"] @@ -54,7 +54,7 @@ async def test_async_redis_pipeline( events = capture_events() connection = FakeRedis() - with start_transaction(): + with sentry_sdk.start_span(): pipeline = connection.pipeline(transaction=is_transaction) pipeline.get("foo") pipeline.set("bar", 1) @@ -65,12 +65,10 @@ async def test_async_redis_pipeline( (span,) = event["spans"] assert span["op"] == "db.redis" assert span["description"] == "redis.pipeline.execute" + assert span["data"]["redis.commands.count"] == 3 + assert span["data"]["redis.commands.first_ten"] == expected_first_ten assert span["data"] == ApproxDict( { - "redis.commands": { - "count": 3, - "first_ten": expected_first_ten, - }, SPANDATA.DB_SYSTEM: "redis", SPANDATA.DB_NAME: "0", SPANDATA.SERVER_ADDRESS: connection.connection_pool.connection_kwargs.get( @@ -80,8 +78,8 @@ async def test_async_redis_pipeline( } ) assert span["tags"] == { - "redis.transaction": is_transaction, - "redis.is_cluster": False, + "redis.transaction": str(is_transaction), + "redis.is_cluster": "False", } @@ -94,7 +92,7 @@ async def test_async_span_origin(sentry_init, capture_events): events = capture_events() connection = FakeRedis() - with start_transaction(name="custom_transaction"): + with sentry_sdk.start_span(name="custom_transaction"): # default case await connection.set("somekey", "somevalue") diff --git a/tests/integrations/redis/cluster/test_redis_cluster.py b/tests/integrations/redis/cluster/test_redis_cluster.py index 83d1b45cc9..936718a5a5 100644 --- a/tests/integrations/redis/cluster/test_redis_cluster.py +++ b/tests/integrations/redis/cluster/test_redis_cluster.py @@ -1,7 +1,7 @@ import pytest -from sentry_sdk import capture_message + +import sentry_sdk from sentry_sdk.consts import SPANDATA -from sentry_sdk.api import start_transaction from sentry_sdk.integrations.redis import RedisIntegration from tests.conftest import ApproxDict @@ -27,7 +27,7 @@ def test_rediscluster_breadcrumb(sentry_init, capture_events): rc = redis.RedisCluster(host="localhost", port=6379) rc.get("foobar") - capture_message("hi") + sentry_sdk.capture_message("hi") (event,) = events crumbs = event["breadcrumbs"]["values"] @@ -68,7 +68,7 @@ def test_rediscluster_basic(sentry_init, capture_events, send_default_pii, descr ) events = capture_events() - with start_transaction(): + with sentry_sdk.start_span(): rc = redis.RedisCluster(host="localhost", port=6379) rc.set("bar", 1) @@ -94,7 +94,7 @@ def test_rediscluster_basic(sentry_init, 
capture_events, send_default_pii, descr assert span["tags"] == { "db.operation": "SET", "redis.command": "SET", - "redis.is_cluster": True, + "redis.is_cluster": "True", "redis.key": "bar", } @@ -117,7 +117,7 @@ def test_rediscluster_pipeline( events = capture_events() rc = redis.RedisCluster(host="localhost", port=6379) - with start_transaction(): + with sentry_sdk.start_span(): pipeline = rc.pipeline() pipeline.get("foo") pipeline.set("bar", 1) @@ -128,12 +128,10 @@ def test_rediscluster_pipeline( (span,) = event["spans"] assert span["op"] == "db.redis" assert span["description"] == "redis.pipeline.execute" + assert span["data"]["redis.commands.count"] == 3 + assert span["data"]["redis.commands.first_ten"] == expected_first_ten assert span["data"] == ApproxDict( { - "redis.commands": { - "count": 3, - "first_ten": expected_first_ten, - }, SPANDATA.DB_SYSTEM: "redis", # ClusterNode converts localhost to 127.0.0.1 SPANDATA.SERVER_ADDRESS: "127.0.0.1", @@ -141,8 +139,8 @@ def test_rediscluster_pipeline( } ) assert span["tags"] == { - "redis.transaction": False, # For Cluster, this is always False - "redis.is_cluster": True, + "redis.transaction": "False", # For Cluster, this is always False + "redis.is_cluster": "True", } @@ -154,7 +152,7 @@ def test_rediscluster_span_origin(sentry_init, capture_events): events = capture_events() rc = redis.RedisCluster(host="localhost", port=6379) - with start_transaction(name="custom_transaction"): + with sentry_sdk.start_span(name="custom_transaction"): # default case rc.set("somekey", "somevalue") diff --git a/tests/integrations/redis/cluster_asyncio/test_redis_cluster_asyncio.py b/tests/integrations/redis/cluster_asyncio/test_redis_cluster_asyncio.py index 993a2962ca..44aa058b8a 100644 --- a/tests/integrations/redis/cluster_asyncio/test_redis_cluster_asyncio.py +++ b/tests/integrations/redis/cluster_asyncio/test_redis_cluster_asyncio.py @@ -1,6 +1,6 @@ import pytest -from sentry_sdk import capture_message, start_transaction +import sentry_sdk from sentry_sdk.consts import SPANDATA from sentry_sdk.integrations.redis import RedisIntegration from tests.conftest import ApproxDict @@ -40,7 +40,7 @@ async def test_async_breadcrumb(sentry_init, capture_events): connection = cluster.RedisCluster(host="localhost", port=6379) await connection.get("foobar") - capture_message("hi") + sentry_sdk.capture_message("hi") (event,) = events (crumb,) = event["breadcrumbs"]["values"] @@ -78,7 +78,7 @@ async def test_async_basic(sentry_init, capture_events, send_default_pii, descri events = capture_events() connection = cluster.RedisCluster(host="localhost", port=6379) - with start_transaction(): + with sentry_sdk.start_span(): await connection.set("bar", 1) (event,) = events @@ -94,7 +94,7 @@ async def test_async_basic(sentry_init, capture_events, send_default_pii, descri } ) assert span["tags"] == { - "redis.is_cluster": True, + "redis.is_cluster": "True", "db.operation": "SET", "redis.command": "SET", "redis.key": "bar", @@ -120,7 +120,7 @@ async def test_async_redis_pipeline( events = capture_events() connection = cluster.RedisCluster(host="localhost", port=6379) - with start_transaction(): + with sentry_sdk.start_span(): pipeline = connection.pipeline() pipeline.get("foo") pipeline.set("bar", 1) @@ -131,12 +131,10 @@ async def test_async_redis_pipeline( (span,) = event["spans"] assert span["op"] == "db.redis" assert span["description"] == "redis.pipeline.execute" + assert span["data"]["redis.commands.count"] == 3 + assert span["data"]["redis.commands.first_ten"] == 
expected_first_ten assert span["data"] == ApproxDict( { - "redis.commands": { - "count": 3, - "first_ten": expected_first_ten, - }, SPANDATA.DB_SYSTEM: "redis", # ClusterNode converts localhost to 127.0.0.1 SPANDATA.SERVER_ADDRESS: "127.0.0.1", @@ -144,8 +142,8 @@ async def test_async_redis_pipeline( } ) assert span["tags"] == { - "redis.transaction": False, - "redis.is_cluster": True, + "redis.transaction": "False", + "redis.is_cluster": "True", } @@ -158,7 +156,7 @@ async def test_async_span_origin(sentry_init, capture_events): events = capture_events() connection = cluster.RedisCluster(host="localhost", port=6379) - with start_transaction(name="custom_transaction"): + with sentry_sdk.start_span(name="custom_transaction"): # default case await connection.set("somekey", "somevalue") diff --git a/tests/integrations/redis/test_redis.py b/tests/integrations/redis/test_redis.py index 5173885f33..4faf4b7fa2 100644 --- a/tests/integrations/redis/test_redis.py +++ b/tests/integrations/redis/test_redis.py @@ -3,7 +3,7 @@ import pytest from fakeredis import FakeStrictRedis -from sentry_sdk import capture_message, start_transaction +import sentry_sdk from sentry_sdk.consts import SPANDATA from sentry_sdk.integrations.redis import RedisIntegration @@ -23,7 +23,7 @@ def test_basic(sentry_init, capture_events): connection = FakeStrictRedis() connection.get("foobar") - capture_message("hi") + sentry_sdk.capture_message("hi") (event,) = events (crumb,) = event["breadcrumbs"]["values"] @@ -60,7 +60,7 @@ def test_redis_pipeline( events = capture_events() connection = FakeStrictRedis() - with start_transaction(): + with sentry_sdk.start_span(name="redis"): pipeline = connection.pipeline(transaction=is_transaction) pipeline.get("foo") pipeline.set("bar", 1) @@ -72,17 +72,15 @@ def test_redis_pipeline( assert span["op"] == "db.redis" assert span["description"] == "redis.pipeline.execute" assert span["data"][SPANDATA.DB_SYSTEM] == "redis" - assert span["data"]["redis.commands"] == { - "count": 3, - "first_ten": expected_first_ten, - } + assert span["data"]["redis.commands.count"] == 3 + assert span["data"]["redis.commands.first_ten"] == expected_first_ten assert span["tags"] == { - "redis.transaction": is_transaction, - "redis.is_cluster": False, + "redis.transaction": str(is_transaction), + "redis.is_cluster": "False", } -def test_sensitive_data(sentry_init, capture_events): +def test_sensitive_data(sentry_init, capture_events, render_span_tree): # fakeredis does not support the AUTH command, so we need to mock it with mock.patch( "sentry_sdk.integrations.redis.utils._COMMANDS_INCLUDING_SENSITIVE_DATA", @@ -96,18 +94,23 @@ def test_sensitive_data(sentry_init, capture_events): events = capture_events() connection = FakeStrictRedis() - with start_transaction(): + with sentry_sdk.start_span(name="redis"): connection.get( "this is super secret" ) # because fakeredis does not support AUTH we use GET instead (event,) = events - spans = event["spans"] - assert spans[0]["op"] == "db.redis" - assert spans[0]["description"] == "GET [Filtered]" + assert event["transaction"] == "redis" + assert ( + render_span_tree(event) + == """\ +- op=null: description=null + - op="db.redis": description="GET [Filtered]"\ +""" + ) -def test_pii_data_redacted(sentry_init, capture_events): +def test_pii_data_redacted(sentry_init, capture_events, render_span_tree): sentry_init( integrations=[RedisIntegration()], traces_sample_rate=1.0, @@ -115,22 +118,27 @@ def test_pii_data_redacted(sentry_init, capture_events): events = 
capture_events() connection = FakeStrictRedis() - with start_transaction(): + with sentry_sdk.start_span(name="redis"): connection.set("somekey1", "my secret string1") connection.set("somekey2", "my secret string2") connection.get("somekey2") connection.delete("somekey1", "somekey2") (event,) = events - spans = event["spans"] - assert spans[0]["op"] == "db.redis" - assert spans[0]["description"] == "SET 'somekey1' [Filtered]" - assert spans[1]["description"] == "SET 'somekey2' [Filtered]" - assert spans[2]["description"] == "GET 'somekey2'" - assert spans[3]["description"] == "DEL 'somekey1' [Filtered]" + assert event["transaction"] == "redis" + assert ( + render_span_tree(event) + == """\ +- op=null: description=null + - op="db.redis": description="SET 'somekey1' [Filtered]" + - op="db.redis": description="SET 'somekey2' [Filtered]" + - op="db.redis": description="GET 'somekey2'" + - op="db.redis": description="DEL 'somekey1' [Filtered]"\ +""" + ) -def test_pii_data_sent(sentry_init, capture_events): +def test_pii_data_sent(sentry_init, capture_events, render_span_tree): sentry_init( integrations=[RedisIntegration()], traces_sample_rate=1.0, @@ -139,22 +147,27 @@ def test_pii_data_sent(sentry_init, capture_events): events = capture_events() connection = FakeStrictRedis() - with start_transaction(): + with sentry_sdk.start_span(name="redis"): connection.set("somekey1", "my secret string1") connection.set("somekey2", "my secret string2") connection.get("somekey2") connection.delete("somekey1", "somekey2") (event,) = events - spans = event["spans"] - assert spans[0]["op"] == "db.redis" - assert spans[0]["description"] == "SET 'somekey1' 'my secret string1'" - assert spans[1]["description"] == "SET 'somekey2' 'my secret string2'" - assert spans[2]["description"] == "GET 'somekey2'" - assert spans[3]["description"] == "DEL 'somekey1' 'somekey2'" + assert event["transaction"] == "redis" + assert ( + render_span_tree(event) + == """\ +- op=null: description=null + - op="db.redis": description="SET 'somekey1' 'my secret string1'" + - op="db.redis": description="SET 'somekey2' 'my secret string2'" + - op="db.redis": description="GET 'somekey2'" + - op="db.redis": description="DEL 'somekey1' 'somekey2'"\ +""" + ) -def test_data_truncation(sentry_init, capture_events): +def test_data_truncation(sentry_init, capture_events, render_span_tree): sentry_init( integrations=[RedisIntegration()], traces_sample_rate=1.0, @@ -163,22 +176,25 @@ def test_data_truncation(sentry_init, capture_events): events = capture_events() connection = FakeStrictRedis() - with start_transaction(): + with sentry_sdk.start_span(name="redis"): long_string = "a" * 100000 connection.set("somekey1", long_string) short_string = "b" * 10 connection.set("somekey2", short_string) (event,) = events - spans = event["spans"] - assert spans[0]["op"] == "db.redis" - assert spans[0]["description"] == "SET 'somekey1' '%s..." % ( - long_string[: 1024 - len("...") - len("SET 'somekey1' '")], + assert event["transaction"] == "redis" + assert ( + render_span_tree(event) + == f"""\ +- op=null: description=null + - op="db.redis": description="SET 'somekey1' '{long_string[: 1024 - len("...") - len("SET 'somekey1' '")]}..." 
+ - op="db.redis": description="SET 'somekey2' 'bbbbbbbbbb'"\ +""" # noqa: E221 ) - assert spans[1]["description"] == "SET 'somekey2' '%s'" % (short_string,) -def test_data_truncation_custom(sentry_init, capture_events): +def test_data_truncation_custom(sentry_init, capture_events, render_span_tree): sentry_init( integrations=[RedisIntegration(max_data_size=30)], traces_sample_rate=1.0, @@ -187,19 +203,22 @@ def test_data_truncation_custom(sentry_init, capture_events): events = capture_events() connection = FakeStrictRedis() - with start_transaction(): + with sentry_sdk.start_span(name="redis"): long_string = "a" * 100000 connection.set("somekey1", long_string) short_string = "b" * 10 connection.set("somekey2", short_string) (event,) = events - spans = event["spans"] - assert spans[0]["op"] == "db.redis" - assert spans[0]["description"] == "SET 'somekey1' '%s..." % ( - long_string[: 30 - len("...") - len("SET 'somekey1' '")], + assert event["transaction"] == "redis" + assert ( + render_span_tree(event) + == f"""\ +- op=null: description=null + - op="db.redis": description="SET 'somekey1' '{long_string[: 30 - len("...") - len("SET 'somekey1' '")]}..." + - op="db.redis": description="SET 'somekey2' '{short_string}'"\ +""" # noqa: E221 ) - assert spans[1]["description"] == "SET 'somekey2' '%s'" % (short_string,) def test_breadcrumbs(sentry_init, capture_events): @@ -216,7 +235,7 @@ def test_breadcrumbs(sentry_init, capture_events): short_string = "b" * 10 connection.set("somekey2", short_string) - capture_message("hi") + sentry_sdk.capture_message("hi") (event,) = events crumbs = event["breadcrumbs"]["values"] @@ -254,7 +273,7 @@ def test_db_connection_attributes_client(sentry_init, capture_events): ) events = capture_events() - with start_transaction(): + with sentry_sdk.start_span(name="redis"): connection = FakeStrictRedis(connection_pool=MOCK_CONNECTION_POOL) connection.get("foobar") @@ -276,7 +295,7 @@ def test_db_connection_attributes_pipeline(sentry_init, capture_events): ) events = capture_events() - with start_transaction(): + with sentry_sdk.start_span(name="redis"): connection = FakeStrictRedis(connection_pool=MOCK_CONNECTION_POOL) pipeline = connection.pipeline(transaction=False) pipeline.get("foo") @@ -303,7 +322,7 @@ def test_span_origin(sentry_init, capture_events): events = capture_events() connection = FakeStrictRedis() - with start_transaction(name="custom_transaction"): + with sentry_sdk.start_span(name="custom_transaction"): # default case connection.set("somekey", "somevalue") diff --git a/tests/integrations/redis/test_redis_cache_module.py b/tests/integrations/redis/test_redis_cache_module.py index f118aa53f5..ea597e0d92 100644 --- a/tests/integrations/redis/test_redis_cache_module.py +++ b/tests/integrations/redis/test_redis_cache_module.py @@ -14,7 +14,7 @@ FAKEREDIS_VERSION = parse_version(fakeredis.__version__) -def test_no_cache_basic(sentry_init, capture_events): +def test_no_cache_basic(sentry_init, capture_events, render_span_tree): sentry_init( integrations=[ RedisIntegration(), @@ -24,16 +24,21 @@ def test_no_cache_basic(sentry_init, capture_events): events = capture_events() connection = FakeStrictRedis() - with sentry_sdk.start_transaction(): + with sentry_sdk.start_span(name="cache"): connection.get("mycachekey") (event,) = events - spans = event["spans"] - assert len(spans) == 1 - assert spans[0]["op"] == "db.redis" + assert event["transaction"] == "cache" + assert ( + render_span_tree(event) + == """\ +- op=null: description=null + - op="db.redis": 
description="GET 'mycachekey'"\ +""" + ) -def test_cache_basic(sentry_init, capture_events): +def test_cache_basic(sentry_init, capture_events, render_span_tree): sentry_init( integrations=[ RedisIntegration( @@ -45,7 +50,7 @@ def test_cache_basic(sentry_init, capture_events): events = capture_events() connection = FakeStrictRedis() - with sentry_sdk.start_transaction(): + with sentry_sdk.start_span(name="cache"): connection.hget("mycachekey", "myfield") connection.get("mycachekey") connection.set("mycachekey1", "bla") @@ -53,31 +58,26 @@ def test_cache_basic(sentry_init, capture_events): connection.mget("mycachekey1", "mycachekey2") (event,) = events - spans = event["spans"] - assert len(spans) == 9 - - # no cache support for hget command - assert spans[0]["op"] == "db.redis" - assert spans[0]["tags"]["redis.command"] == "HGET" - - assert spans[1]["op"] == "cache.get" - assert spans[2]["op"] == "db.redis" - assert spans[2]["tags"]["redis.command"] == "GET" - - assert spans[3]["op"] == "cache.put" - assert spans[4]["op"] == "db.redis" - assert spans[4]["tags"]["redis.command"] == "SET" - - assert spans[5]["op"] == "cache.put" - assert spans[6]["op"] == "db.redis" - assert spans[6]["tags"]["redis.command"] == "SETEX" - - assert spans[7]["op"] == "cache.get" - assert spans[8]["op"] == "db.redis" - assert spans[8]["tags"]["redis.command"] == "MGET" + assert event["transaction"] == "cache" + # no cache support for HGET command + assert ( + render_span_tree(event) + == """\ +- op=null: description=null + - op="db.redis": description="HGET 'mycachekey' [Filtered]" + - op="cache.get": description="mycachekey" + - op="db.redis": description="GET 'mycachekey'" + - op="cache.put": description="mycachekey1" + - op="db.redis": description="SET 'mycachekey1' [Filtered]" + - op="cache.put": description="mycachekey2" + - op="db.redis": description="SETEX 'mycachekey2' [Filtered] [Filtered]" + - op="cache.get": description="mycachekey1, mycachekey2" + - op="db.redis": description="MGET 'mycachekey1' [Filtered]"\ +""" + ) -def test_cache_keys(sentry_init, capture_events): +def test_cache_keys(sentry_init, capture_events, render_span_tree): sentry_init( integrations=[ RedisIntegration( @@ -89,30 +89,26 @@ def test_cache_keys(sentry_init, capture_events): events = capture_events() connection = FakeStrictRedis() - with sentry_sdk.start_transaction(): + with sentry_sdk.start_span(name="cache"): connection.get("somethingelse") connection.get("blub") connection.get("blubkeything") connection.get("bl") (event,) = events - spans = event["spans"] - assert len(spans) == 6 - assert spans[0]["op"] == "db.redis" - assert spans[0]["description"] == "GET 'somethingelse'" - - assert spans[1]["op"] == "cache.get" - assert spans[1]["description"] == "blub" - assert spans[2]["op"] == "db.redis" - assert spans[2]["description"] == "GET 'blub'" - - assert spans[3]["op"] == "cache.get" - assert spans[3]["description"] == "blubkeything" - assert spans[4]["op"] == "db.redis" - assert spans[4]["description"] == "GET 'blubkeything'" - - assert spans[5]["op"] == "db.redis" - assert spans[5]["description"] == "GET 'bl'" + assert event["transaction"] == "cache" + assert ( + render_span_tree(event) + == """\ +- op=null: description=null + - op="db.redis": description="GET 'somethingelse'" + - op="cache.get": description="blub" + - op="db.redis": description="GET 'blub'" + - op="cache.get": description="blubkeything" + - op="db.redis": description="GET 'blubkeything'" + - op="db.redis": description="GET 'bl'"\ +""" + ) def 
test_cache_data(sentry_init, capture_events): @@ -127,13 +123,14 @@ def test_cache_data(sentry_init, capture_events): events = capture_events() connection = FakeStrictRedis(host="mycacheserver.io", port=6378) - with sentry_sdk.start_transaction(): + with sentry_sdk.start_span(name="cache"): connection.get("mycachekey") connection.set("mycachekey", "事实胜于雄辩") connection.get("mycachekey") (event,) = events - spans = event["spans"] + assert event["transaction"] == "cache" + spans = sorted(event["spans"], key=lambda x: x["start_timestamp"]) assert len(spans) == 6 @@ -210,7 +207,7 @@ def test_cache_prefixes(sentry_init, capture_events): events = capture_events() connection = FakeStrictRedis() - with sentry_sdk.start_transaction(): + with sentry_sdk.start_span(name="cache"): connection.mget("yes", "no") connection.mget("no", 1, "yes") connection.mget("no", "yes.1", "yes.2") @@ -221,8 +218,9 @@ def test_cache_prefixes(sentry_init, capture_events): connection.mget(uuid.uuid4().bytes, "yes") (event,) = events + assert event["transaction"] == "cache" - spans = event["spans"] + spans = sorted(event["spans"], key=lambda x: x["start_timestamp"]) assert len(spans) == 13 # 8 db spans + 5 cache spans cache_spans = [span for span in spans if span["op"] == "cache.get"] diff --git a/tests/integrations/redis/test_redis_cache_module_async.py b/tests/integrations/redis/test_redis_cache_module_async.py index d607f92fbd..cd1804a9a3 100644 --- a/tests/integrations/redis/test_redis_cache_module_async.py +++ b/tests/integrations/redis/test_redis_cache_module_async.py @@ -21,7 +21,7 @@ @pytest.mark.asyncio -async def test_no_cache_basic(sentry_init, capture_events): +async def test_no_cache_basic(sentry_init, capture_events, render_span_tree): sentry_init( integrations=[ RedisIntegration(), @@ -31,17 +31,22 @@ async def test_no_cache_basic(sentry_init, capture_events): events = capture_events() connection = FakeRedisAsync() - with sentry_sdk.start_transaction(): + with sentry_sdk.start_span(name="redis"): await connection.get("myasynccachekey") (event,) = events - spans = event["spans"] - assert len(spans) == 1 - assert spans[0]["op"] == "db.redis" + assert event["transaction"] == "redis" + assert ( + render_span_tree(event) + == """\ +- op=null: description=null + - op="db.redis": description="GET 'myasynccachekey'"\ +""" + ) @pytest.mark.asyncio -async def test_cache_basic(sentry_init, capture_events): +async def test_cache_basic(sentry_init, capture_events, render_span_tree): sentry_init( integrations=[ RedisIntegration( @@ -53,19 +58,23 @@ async def test_cache_basic(sentry_init, capture_events): events = capture_events() connection = FakeRedisAsync() - with sentry_sdk.start_transaction(): + with sentry_sdk.start_span(name="redis"): await connection.get("myasynccachekey") (event,) = events - spans = event["spans"] - assert len(spans) == 2 - - assert spans[0]["op"] == "cache.get" - assert spans[1]["op"] == "db.redis" + assert event["transaction"] == "redis" + assert ( + render_span_tree(event) + == """\ +- op=null: description=null + - op="cache.get": description="myasynccachekey" + - op="db.redis": description="GET 'myasynccachekey'"\ +""" + ) @pytest.mark.asyncio -async def test_cache_keys(sentry_init, capture_events): +async def test_cache_keys(sentry_init, capture_events, render_span_tree): sentry_init( integrations=[ RedisIntegration( @@ -77,30 +86,26 @@ async def test_cache_keys(sentry_init, capture_events): events = capture_events() connection = FakeRedisAsync() - with sentry_sdk.start_transaction(): + with 
sentry_sdk.start_span(name="redis"): await connection.get("asomethingelse") await connection.get("ablub") await connection.get("ablubkeything") await connection.get("abl") (event,) = events - spans = event["spans"] - assert len(spans) == 6 - assert spans[0]["op"] == "db.redis" - assert spans[0]["description"] == "GET 'asomethingelse'" - - assert spans[1]["op"] == "cache.get" - assert spans[1]["description"] == "ablub" - assert spans[2]["op"] == "db.redis" - assert spans[2]["description"] == "GET 'ablub'" - - assert spans[3]["op"] == "cache.get" - assert spans[3]["description"] == "ablubkeything" - assert spans[4]["op"] == "db.redis" - assert spans[4]["description"] == "GET 'ablubkeything'" - - assert spans[5]["op"] == "db.redis" - assert spans[5]["description"] == "GET 'abl'" + assert event["transaction"] == "redis" + assert ( + render_span_tree(event) + == """\ +- op=null: description=null + - op="db.redis": description="GET 'asomethingelse'" + - op="cache.get": description="ablub" + - op="db.redis": description="GET 'ablub'" + - op="cache.get": description="ablubkeything" + - op="db.redis": description="GET 'ablubkeything'" + - op="db.redis": description="GET 'abl'"\ +""" + ) @pytest.mark.asyncio @@ -116,13 +121,14 @@ async def test_cache_data(sentry_init, capture_events): events = capture_events() connection = FakeRedisAsync(host="mycacheserver.io", port=6378) - with sentry_sdk.start_transaction(): + with sentry_sdk.start_span(name="redis"): await connection.get("myasynccachekey") await connection.set("myasynccachekey", "事实胜于雄辩") await connection.get("myasynccachekey") (event,) = events - spans = event["spans"] + assert event["transaction"] == "redis" + spans = sorted(event["spans"], key=lambda x: x["start_timestamp"]) assert len(spans) == 6 diff --git a/tests/integrations/redis_py_cluster_legacy/test_redis_py_cluster_legacy.py b/tests/integrations/redis_py_cluster_legacy/test_redis_py_cluster_legacy.py index 36a27d569d..f6d9e04847 100644 --- a/tests/integrations/redis_py_cluster_legacy/test_redis_py_cluster_legacy.py +++ b/tests/integrations/redis_py_cluster_legacy/test_redis_py_cluster_legacy.py @@ -4,7 +4,7 @@ import rediscluster from sentry_sdk import capture_message -from sentry_sdk.api import start_transaction +from sentry_sdk.api import start_span from sentry_sdk.consts import SPANDATA from sentry_sdk.integrations.redis import RedisIntegration from tests.conftest import ApproxDict @@ -84,7 +84,7 @@ def test_rediscluster_pipeline( events = capture_events() rc = rediscluster.RedisCluster(connection_pool=MOCK_CONNECTION_POOL) - with start_transaction(): + with start_span(name="redis"): pipeline = rc.pipeline() pipeline.get("foo") pipeline.set("bar", 1) @@ -95,12 +95,10 @@ def test_rediscluster_pipeline( (span,) = event["spans"] assert span["op"] == "db.redis" assert span["description"] == "redis.pipeline.execute" + assert span["data"]["redis.commands.count"] == 3 + assert span["data"]["redis.commands.first_ten"] == expected_first_ten assert span["data"] == ApproxDict( { - "redis.commands": { - "count": 3, - "first_ten": expected_first_ten, - }, SPANDATA.DB_SYSTEM: "redis", SPANDATA.DB_NAME: "1", SPANDATA.SERVER_ADDRESS: "localhost", @@ -108,8 +106,8 @@ def test_rediscluster_pipeline( } ) assert span["tags"] == { - "redis.transaction": False, # For Cluster, this is always False - "redis.is_cluster": True, + "redis.transaction": "False", # For Cluster, this is always False + "redis.is_cluster": "True", } @@ -122,7 +120,7 @@ def test_db_connection_attributes_client(sentry_init, 
capture_events, redisclust events = capture_events() rc = rediscluster_cls(connection_pool=MOCK_CONNECTION_POOL) - with start_transaction(): + with start_span(name="redis"): rc.get("foobar") (event,) = events @@ -149,7 +147,7 @@ def test_db_connection_attributes_pipeline( events = capture_events() rc = rediscluster.RedisCluster(connection_pool=MOCK_CONNECTION_POOL) - with start_transaction(): + with start_span(name="redis"): pipeline = rc.pipeline() pipeline.get("foo") pipeline.execute() @@ -158,12 +156,11 @@ def test_db_connection_attributes_pipeline( (span,) = event["spans"] assert span["op"] == "db.redis" assert span["description"] == "redis.pipeline.execute" + assert span["data"]["redis.commands.count"] == 1 + assert span["data"]["redis.commands.first_ten"] == ["GET 'foo'"] + assert span["data"] == ApproxDict( { - "redis.commands": { - "count": 1, - "first_ten": ["GET 'foo'"], - }, SPANDATA.DB_SYSTEM: "redis", SPANDATA.DB_NAME: "1", SPANDATA.SERVER_ADDRESS: "localhost", diff --git a/tests/integrations/requests/test_requests.py b/tests/integrations/requests/test_requests.py index 8cfc0f932f..3862763a75 100644 --- a/tests/integrations/requests/test_requests.py +++ b/tests/integrations/requests/test_requests.py @@ -43,8 +43,8 @@ def test_crumb_capture(sentry_init, capture_events): @pytest.mark.parametrize( "status_code,level", [ - (200, None), - (301, None), + (200, "info"), + (301, "info"), (403, "warning"), (405, "warning"), (500, "error"), @@ -66,12 +66,7 @@ def test_crumb_capture_client_error(sentry_init, capture_events, status_code, le (crumb,) = event["breadcrumbs"]["values"] assert crumb["type"] == "http" assert crumb["category"] == "httplib" - - if level is None: - assert "level" not in crumb - else: - assert crumb["level"] == level - + assert crumb["level"] == level assert crumb["data"] == ApproxDict( { "url": url, diff --git a/tests/integrations/rq/test_rq.py b/tests/integrations/rq/test_rq.py index e445b588be..a57a3d0dec 100644 --- a/tests/integrations/rq/test_rq.py +++ b/tests/integrations/rq/test_rq.py @@ -5,7 +5,6 @@ from fakeredis import FakeStrictRedis import sentry_sdk -from sentry_sdk import start_transaction from sentry_sdk.integrations.rq import RqIntegration from sentry_sdk.utils import parse_version @@ -119,7 +118,9 @@ def test_transaction_with_error( ) assert envelope["type"] == "transaction" - assert envelope["contexts"]["trace"] == error_event["contexts"]["trace"] + assert envelope["contexts"]["trace"] == DictionaryContaining( + error_event["contexts"]["trace"] + ) assert envelope["transaction"] == error_event["transaction"] assert envelope["extra"]["rq-job"] == DictionaryContaining( { @@ -150,8 +151,7 @@ def test_error_has_trace_context_if_tracing_disabled( def test_tracing_enabled( - sentry_init, - capture_events, + sentry_init, capture_events, DictionaryContaining # noqa: N803 ): sentry_init(integrations=[RqIntegration()], traces_sample_rate=1.0) events = capture_events() @@ -159,16 +159,17 @@ def test_tracing_enabled( queue = rq.Queue(connection=FakeStrictRedis()) worker = rq.SimpleWorker([queue], connection=queue.connection) - with start_transaction(op="rq transaction") as transaction: - queue.enqueue(crashing_job, foo=None) - worker.work(burst=True) + queue.enqueue(crashing_job, foo=None) + worker.work(burst=True) - error_event, envelope, _ = events + error_event, transaction = events assert error_event["transaction"] == "tests.integrations.rq.test_rq.crashing_job" - assert error_event["contexts"]["trace"]["trace_id"] == transaction.trace_id - - assert 
envelope["contexts"]["trace"] == error_event["contexts"]["trace"] + assert transaction["transaction"] == "tests.integrations.rq.test_rq.crashing_job" + assert ( + DictionaryContaining(error_event["contexts"]["trace"]) + == transaction["contexts"]["trace"] + ) def test_tracing_disabled( @@ -221,34 +222,33 @@ def test_transaction_no_error( ) -def test_traces_sampler_gets_correct_values_in_sampling_context( - sentry_init, DictionaryContaining, ObjectDescribedBy # noqa:N803 -): +def test_traces_sampler_gets_correct_values_in_sampling_context(sentry_init): traces_sampler = mock.Mock(return_value=True) sentry_init(integrations=[RqIntegration()], traces_sampler=traces_sampler) queue = rq.Queue(connection=FakeStrictRedis()) worker = rq.SimpleWorker([queue], connection=queue.connection) - queue.enqueue(do_trick, "Bodhi", trick="roll over") + queue.enqueue( + do_trick, + "Bodhi", + {"age": 5}, + trick="roll over", + times=2, + followup=["fetch", "give paw"], + ) worker.work(burst=True) - traces_sampler.assert_any_call( - DictionaryContaining( - { - "rq_job": ObjectDescribedBy( - type=rq.job.Job, - attrs={ - "description": "tests.integrations.rq.test_rq.do_trick('Bodhi', trick='roll over')", - "result": "Bodhi, can you roll over? Good dog!", - "func_name": "tests.integrations.rq.test_rq.do_trick", - "args": ("Bodhi",), - "kwargs": {"trick": "roll over"}, - }, - ), - } - ) - ) + sampling_context = traces_sampler.call_args_list[0][0][0] + assert sampling_context["messaging.system"] == "rq" + assert sampling_context["rq.job.args.0"] == "Bodhi" + assert sampling_context["rq.job.args.1"] == "{'age': 5}" + assert sampling_context["rq.job.kwargs.trick"] == "roll over" + assert sampling_context["rq.job.kwargs.times"] == "2" + assert sampling_context["rq.job.kwargs.followup"] == "['fetch', 'give paw']" + assert sampling_context["rq.job.func"] == "do_trick" + assert sampling_context["messaging.message.id"] + assert sampling_context["messaging.destination.name"] == "default" @pytest.mark.skipif( diff --git a/tests/integrations/rust_tracing/test_rust_tracing.py b/tests/integrations/rust_tracing/test_rust_tracing.py index 893fc86966..a7ddbfa6ec 100644 --- a/tests/integrations/rust_tracing/test_rust_tracing.py +++ b/tests/integrations/rust_tracing/test_rust_tracing.py @@ -11,7 +11,8 @@ RustTracingLevel, EventTypeMapping, ) -from sentry_sdk import start_transaction, capture_message +from sentry_sdk import start_span, capture_message +from tests.conftest import ApproxDict def _test_event_type_mapping(metadata: Dict[str, object]) -> EventTypeMapping: @@ -74,11 +75,11 @@ def test_on_new_span_on_close(sentry_init, capture_events): sentry_init(integrations=[integration], traces_sample_rate=1.0) events = capture_events() - with start_transaction(): + with start_span(): rust_tracing.new_span(RustTracingLevel.Info, 3) sentry_first_rust_span = sentry_sdk.get_current_span() - _, rust_first_rust_span = rust_tracing.spans[3] + rust_first_rust_span = rust_tracing.spans[3] assert sentry_first_rust_span == rust_first_rust_span @@ -102,7 +103,7 @@ def test_on_new_span_on_close(sentry_init, capture_events): data = span["data"] assert data["use_memoized"] assert data["index"] == 10 - assert data["version"] is None + assert "version" not in data def test_nested_on_new_span_on_close(sentry_init, capture_events): @@ -115,23 +116,19 @@ def test_nested_on_new_span_on_close(sentry_init, capture_events): sentry_init(integrations=[integration], traces_sample_rate=1.0) events = capture_events() - with start_transaction(): + with 
start_span(): original_sentry_span = sentry_sdk.get_current_span() rust_tracing.new_span(RustTracingLevel.Info, 3, index_arg=10) sentry_first_rust_span = sentry_sdk.get_current_span() - _, rust_first_rust_span = rust_tracing.spans[3] # Use a different `index_arg` value for the inner span to help # distinguish the two at the end of the test rust_tracing.new_span(RustTracingLevel.Info, 5, index_arg=9) sentry_second_rust_span = sentry_sdk.get_current_span() - rust_parent_span, rust_second_rust_span = rust_tracing.spans[5] + rust_second_rust_span = rust_tracing.spans[5] assert rust_second_rust_span == sentry_second_rust_span - assert rust_parent_span == sentry_first_rust_span - assert rust_parent_span == rust_first_rust_span - assert rust_parent_span != rust_second_rust_span rust_tracing.close_span(5) @@ -171,15 +168,15 @@ def test_nested_on_new_span_on_close(sentry_init, capture_events): first_span_data = first_span["data"] assert first_span_data["use_memoized"] assert first_span_data["index"] == 10 - assert first_span_data["version"] is None + assert "version" not in first_span_data second_span_data = second_span["data"] assert second_span_data["use_memoized"] assert second_span_data["index"] == 9 - assert second_span_data["version"] is None + assert "version" not in second_span_data -def test_on_new_span_without_transaction(sentry_init): +def test_no_spans_without_transaction(sentry_init): rust_tracing = FakeRustTracing() integration = RustTracingIntegration( "test_on_new_span_without_transaction", rust_tracing.set_layer_impl @@ -188,11 +185,9 @@ def test_on_new_span_without_transaction(sentry_init): assert sentry_sdk.get_current_span() is None - # Should still create a span hierarchy, it just will not be under a txn rust_tracing.new_span(RustTracingLevel.Info, 3) current_span = sentry_sdk.get_current_span() - assert current_span is not None - assert current_span.containing_transaction is None + assert current_span is None def test_on_event_exception(sentry_init, capture_events): @@ -207,7 +202,7 @@ def test_on_event_exception(sentry_init, capture_events): events = capture_events() sentry_sdk.get_isolation_scope().clear_breadcrumbs() - with start_transaction(): + with start_span(): rust_tracing.new_span(RustTracingLevel.Info, 3) # Mapped to Exception @@ -243,7 +238,7 @@ def test_on_event_breadcrumb(sentry_init, capture_events): events = capture_events() sentry_sdk.get_isolation_scope().clear_breadcrumbs() - with start_transaction(): + with start_span(): rust_tracing.new_span(RustTracingLevel.Info, 3) # Mapped to Breadcrumb @@ -274,7 +269,7 @@ def test_on_event_event(sentry_init, capture_events): events = capture_events() sentry_sdk.get_isolation_scope().clear_breadcrumbs() - with start_transaction(): + with start_span(): rust_tracing.new_span(RustTracingLevel.Info, 3) # Mapped to Event @@ -311,7 +306,7 @@ def test_on_event_ignored(sentry_init, capture_events): events = capture_events() sentry_sdk.get_isolation_scope().clear_breadcrumbs() - with start_transaction(): + with start_span(): rust_tracing.new_span(RustTracingLevel.Info, 3) # Ignored @@ -344,7 +339,7 @@ def span_filter(metadata: Dict[str, object]) -> bool: sentry_init(integrations=[integration], traces_sample_rate=1.0) events = capture_events() - with start_transaction(): + with start_span(): original_sentry_span = sentry_sdk.get_current_span() # Span is not ignored @@ -377,16 +372,16 @@ def test_record(sentry_init): ) sentry_init(integrations=[integration], traces_sample_rate=1.0) - with start_transaction(): + with start_span(): 
rust_tracing.new_span(RustTracingLevel.Info, 3) span_before_record = sentry_sdk.get_current_span().to_json() - assert span_before_record["data"]["version"] is None + assert "version" not in span_before_record["attributes"] rust_tracing.record(3) span_after_record = sentry_sdk.get_current_span().to_json() - assert span_after_record["data"]["version"] == "memoized" + assert span_after_record["attributes"]["version"] == "memoized" def test_record_in_ignored_span(sentry_init): @@ -403,18 +398,18 @@ def span_filter(metadata: Dict[str, object]) -> bool: ) sentry_init(integrations=[integration], traces_sample_rate=1.0) - with start_transaction(): + with start_span(): rust_tracing.new_span(RustTracingLevel.Info, 3) span_before_record = sentry_sdk.get_current_span().to_json() - assert span_before_record["data"]["version"] is None + assert "version" not in span_before_record["attributes"] rust_tracing.new_span(RustTracingLevel.Trace, 5) rust_tracing.record(5) # `on_record()` should not do anything to the current Sentry span if the associated Rust span was ignored span_after_record = sentry_sdk.get_current_span().to_json() - assert span_after_record["data"]["version"] is None + assert "version" not in span_after_record["attributes"] @pytest.mark.parametrize( @@ -443,33 +438,37 @@ def test_include_tracing_fields( traces_sample_rate=1.0, send_default_pii=send_default_pii, ) - with start_transaction(): + with start_span(): rust_tracing.new_span(RustTracingLevel.Info, 3) span_before_record = sentry_sdk.get_current_span().to_json() if tracing_fields_expected: - assert span_before_record["data"]["version"] is None + assert "version" not in span_before_record["attributes"] else: - assert span_before_record["data"]["version"] == "[Filtered]" + assert span_before_record["attributes"]["version"] == "[Filtered]" rust_tracing.record(3) span_after_record = sentry_sdk.get_current_span().to_json() if tracing_fields_expected: - assert span_after_record["data"] == { - "thread.id": mock.ANY, - "thread.name": mock.ANY, - "use_memoized": True, - "version": "memoized", - "index": 10, - } + assert span_after_record["attributes"] == ApproxDict( + { + "thread.id": mock.ANY, + "thread.name": mock.ANY, + "use_memoized": True, + "version": "memoized", + "index": 10, + } + ) else: - assert span_after_record["data"] == { - "thread.id": mock.ANY, - "thread.name": mock.ANY, - "use_memoized": "[Filtered]", - "version": "[Filtered]", - "index": "[Filtered]", - } + assert span_after_record["attributes"] == ApproxDict( + { + "thread.id": mock.ANY, + "thread.name": mock.ANY, + "use_memoized": "[Filtered]", + "version": "[Filtered]", + "index": "[Filtered]", + } + ) diff --git a/tests/integrations/sanic/test_sanic.py b/tests/integrations/sanic/test_sanic.py index 0419127239..0244e0f329 100644 --- a/tests/integrations/sanic/test_sanic.py +++ b/tests/integrations/sanic/test_sanic.py @@ -3,6 +3,7 @@ import os import random import sys +from typing import Any, Iterable, Optional, Container from unittest.mock import Mock import pytest @@ -26,11 +27,6 @@ except ImportError: ReusableClient = None -from typing import TYPE_CHECKING - -if TYPE_CHECKING: - from collections.abc import Iterable, Container - from typing import Any, Optional SANIC_VERSION = tuple(map(int, SANIC_VERSION_RAW.split("."))) PERFORMANCE_SUPPORTED = SANIC_VERSION >= (21, 9) @@ -341,13 +337,13 @@ class TransactionTestConfig: def __init__( self, - integration_args, - url, - expected_status, - expected_transaction_name, - expected_source=None, - ): - # type: 
(Iterable[Optional[Container[int]]], str, int, Optional[str], Optional[str]) -> None + integration_args: Iterable[Optional[Container[int]]], + url: str, + expected_status: int, + expected_transaction_name: Optional[str], + expected_source: Optional[str] = None, + has_transaction_event: bool = True, + ) -> None: """ expected_transaction_name of None indicates we expect to not receive a transaction """ @@ -356,6 +352,7 @@ def __init__( self.expected_status = expected_status self.expected_transaction_name = expected_transaction_name self.expected_source = expected_source + self.has_transaction_event = has_transaction_event @pytest.mark.skipif( @@ -386,6 +383,7 @@ def __init__( url="/404", expected_status=404, expected_transaction_name=None, + has_transaction_event=False, ), TransactionTestConfig( # With no ignored HTTP statuses, we should get transactions for 404 errors @@ -401,11 +399,13 @@ def __init__( url="/message", expected_status=200, expected_transaction_name=None, + has_transaction_event=False, ), ], ) -def test_transactions(test_config, sentry_init, app, capture_events): - # type: (TransactionTestConfig, Any, Any, Any) -> None +def test_transactions( + test_config: TransactionTestConfig, sentry_init: Any, app: Any, capture_events: Any +) -> None: # Init the SanicIntegration with the desired arguments sentry_init( @@ -430,9 +430,7 @@ def test_transactions(test_config, sentry_init, app, capture_events): (transaction_event, *_) = [*transaction_events, None] # We should have no transaction event if and only if we expect no transactions - assert (transaction_event is None) == ( - test_config.expected_transaction_name is None - ) + assert bool(transaction_event) == test_config.has_transaction_event # If a transaction was expected, ensure it is correct assert ( diff --git a/tests/integrations/socket/test_socket.py b/tests/integrations/socket/test_socket.py index cc109e0968..05b2acbc76 100644 --- a/tests/integrations/socket/test_socket.py +++ b/tests/integrations/socket/test_socket.py @@ -1,6 +1,6 @@ import socket -from sentry_sdk import start_transaction +from sentry_sdk import start_span from sentry_sdk.integrations.socket import SocketIntegration from tests.conftest import ApproxDict, create_mock_http_server @@ -11,7 +11,7 @@ def test_getaddrinfo_trace(sentry_init, capture_events): sentry_init(integrations=[SocketIntegration()], traces_sample_rate=1.0) events = capture_events() - with start_transaction(): + with start_span(name="socket"): socket.getaddrinfo("localhost", PORT) (event,) = events @@ -33,7 +33,7 @@ def test_create_connection_trace(sentry_init, capture_events): sentry_init(integrations=[SocketIntegration()], traces_sample_rate=1.0) events = capture_events() - with start_transaction(): + with start_span(name="socket"): socket.create_connection(("localhost", PORT), timeout, None) (event,) = events @@ -44,9 +44,9 @@ def test_create_connection_trace(sentry_init, capture_events): assert connect_span["description"] == f"localhost:{PORT}" # noqa: E231 assert connect_span["data"] == ApproxDict( { - "address": ["localhost", PORT], + "address.host": "localhost", + "address.port": PORT, "timeout": timeout, - "source_address": None, } ) @@ -67,7 +67,7 @@ def test_span_origin(sentry_init, capture_events): ) events = capture_events() - with start_transaction(name="foo"): + with start_span(name="foo"): socket.create_connection(("localhost", PORT), 1, None) (event,) = events diff --git a/tests/integrations/sqlalchemy/test_sqlalchemy.py b/tests/integrations/sqlalchemy/test_sqlalchemy.py index 
d2a31a55d5..b1d85f9eb8 100644 --- a/tests/integrations/sqlalchemy/test_sqlalchemy.py +++ b/tests/integrations/sqlalchemy/test_sqlalchemy.py @@ -1,8 +1,10 @@ +import contextlib import os from datetime import datetime from unittest import mock import pytest +from freezegun import freeze_time from sqlalchemy import Column, ForeignKey, Integer, String, create_engine from sqlalchemy.exc import IntegrityError from sqlalchemy.ext.declarative import declarative_base @@ -10,12 +12,9 @@ from sqlalchemy import text import sentry_sdk -from sentry_sdk import capture_message, start_transaction from sentry_sdk.consts import DEFAULT_MAX_VALUE_LENGTH, SPANDATA from sentry_sdk.integrations.sqlalchemy import SqlalchemyIntegration -from sentry_sdk.serializer import MAX_EVENT_BYTES from sentry_sdk.tracing_utils import record_sql_queries -from sentry_sdk.utils import json_dumps def test_orm_queries(sentry_init, capture_events): @@ -53,7 +52,7 @@ class Address(Base): assert session.query(Person).first() == bob - capture_message("hi") + sentry_sdk.capture_message("hi") (event,) = events @@ -110,7 +109,7 @@ class Address(Base): Session = sessionmaker(bind=engine) # noqa: N806 session = Session() - with start_transaction(name="test_transaction", sampled=True): + with sentry_sdk.start_span(name="test_transaction", sampled=True): with session.begin_nested(): session.query(Person).first() @@ -124,6 +123,7 @@ class Address(Base): session.query(Person).first() (event,) = events + assert event["transaction"] == "test_transaction" for span in event["spans"]: assert span["data"][SPANDATA.DB_SYSTEM] == "sqlite" @@ -184,7 +184,7 @@ class Address(Base): Session = sessionmaker(bind=engine) # noqa: N806 session = Session() - with start_transaction(name="test_transaction", sampled=True): + with sentry_sdk.start_span(name="test_transaction", sampled=True): with session.begin_nested(): session.query(Person).first() @@ -216,7 +216,7 @@ def test_long_sql_query_preserved(sentry_init, capture_events): engine = create_engine( "sqlite:///:memory:", connect_args={"check_same_thread": False} ) - with start_transaction(name="test"): + with sentry_sdk.start_span(name="test"): with engine.connect() as con: con.execute(text(" UNION ".join("SELECT {}".format(i) for i in range(100)))) @@ -245,7 +245,7 @@ def processor(event, hint): engine = create_engine( "sqlite:///:memory:", connect_args={"check_same_thread": False} ) - with start_transaction(name="test"): + with sentry_sdk.start_span(name="test"): with engine.connect() as con: for _ in range(1500): con.execute( @@ -254,8 +254,6 @@ def processor(event, hint): (event,) = events - assert len(json_dumps(event)) > MAX_EVENT_BYTES - # Some spans are discarded. 
assert len(event["spans"]) == 1000 @@ -299,18 +297,16 @@ def test_engine_name_not_string(sentry_init): def test_query_source_disabled(sentry_init, capture_events): - sentry_options = { - "integrations": [SqlalchemyIntegration()], - "enable_tracing": True, - "enable_db_query_source": False, - "db_query_source_threshold_ms": 0, - } - - sentry_init(**sentry_options) + sentry_init( + integrations=[SqlalchemyIntegration()], + traces_sample_rate=1.0, + enable_db_query_source=False, + db_query_source_threshold_ms=0, + ) events = capture_events() - with start_transaction(name="test_transaction", sampled=True): + with sentry_sdk.start_span(name="test_transaction", sampled=True): Base = declarative_base() # noqa: N806 class Person(Base): @@ -352,7 +348,7 @@ class Person(Base): def test_query_source_enabled(sentry_init, capture_events, enable_db_query_source): sentry_options = { "integrations": [SqlalchemyIntegration()], - "enable_tracing": True, + "traces_sample_rate": 1.0, "db_query_source_threshold_ms": 0, } if enable_db_query_source is not None: @@ -362,7 +358,7 @@ def test_query_source_enabled(sentry_init, capture_events, enable_db_query_sourc events = capture_events() - with start_transaction(name="test_transaction", sampled=True): + with sentry_sdk.start_span(name="test_transaction", sampled=True): Base = declarative_base() # noqa: N806 class Person(Base): @@ -403,13 +399,13 @@ class Person(Base): def test_query_source(sentry_init, capture_events): sentry_init( integrations=[SqlalchemyIntegration()], - enable_tracing=True, + traces_sample_rate=1.0, enable_db_query_source=True, db_query_source_threshold_ms=0, ) events = capture_events() - with start_transaction(name="test_transaction", sampled=True): + with sentry_sdk.start_span(name="test_transaction", sampled=True): Base = declarative_base() # noqa: N806 class Person(Base): @@ -468,7 +464,7 @@ def test_query_source_with_module_in_search_path(sentry_init, capture_events): """ sentry_init( integrations=[SqlalchemyIntegration()], - enable_tracing=True, + traces_sample_rate=1.0, enable_db_query_source=True, db_query_source_threshold_ms=0, ) @@ -479,7 +475,7 @@ def test_query_source_with_module_in_search_path(sentry_init, capture_events): query_first_model_from_session, ) - with start_transaction(name="test_transaction", sampled=True): + with sentry_sdk.start_span(name="test_transaction", sampled=True): Base = declarative_base() # noqa: N806 class Person(Base): @@ -531,13 +527,13 @@ class Person(Base): def test_no_query_source_if_duration_too_short(sentry_init, capture_events): sentry_init( integrations=[SqlalchemyIntegration()], - enable_tracing=True, + traces_sample_rate=1.0, enable_db_query_source=True, db_query_source_threshold_ms=100, ) events = capture_events() - with start_transaction(name="test_transaction", sampled=True): + with sentry_sdk.start_span(name="test_transaction", sampled=True): Base = declarative_base() # noqa: N806 class Person(Base): @@ -558,11 +554,13 @@ class Person(Base): class fake_record_sql_queries: # noqa: N801 def __init__(self, *args, **kwargs): - with record_sql_queries(*args, **kwargs) as span: - self.span = span + with freeze_time(datetime(2024, 1, 1, microsecond=0)): + with record_sql_queries(*args, **kwargs) as span: + self.span = span + freezer = freeze_time(datetime(2024, 1, 1, microsecond=99999)) + freezer.start() - self.span.start_timestamp = datetime(2024, 1, 1, microsecond=0) - self.span.timestamp = datetime(2024, 1, 1, microsecond=99999) + freezer.stop() def __enter__(self): return self.span @@ -597,13 
+595,13 @@ def __exit__(self, type, value, traceback): def test_query_source_if_duration_over_threshold(sentry_init, capture_events): sentry_init( integrations=[SqlalchemyIntegration()], - enable_tracing=True, + traces_sample_rate=1.0, enable_db_query_source=True, db_query_source_threshold_ms=100, ) events = capture_events() - with start_transaction(name="test_transaction", sampled=True): + with sentry_sdk.start_span(name="test_transaction", sampled=True): Base = declarative_base() # noqa: N806 class Person(Base): @@ -622,19 +620,15 @@ class Person(Base): bob = Person(name="Bob") session.add(bob) - class fake_record_sql_queries: # noqa: N801 - def __init__(self, *args, **kwargs): + @contextlib.contextmanager + def fake_record_sql_queries(*args, **kwargs): # noqa: N801 + with freeze_time(datetime(2024, 1, 1, second=0)): with record_sql_queries(*args, **kwargs) as span: - self.span = span + freezer = freeze_time(datetime(2024, 1, 1, second=1)) + freezer.start() + yield span - self.span.start_timestamp = datetime(2024, 1, 1, microsecond=0) - self.span.timestamp = datetime(2024, 1, 1, microsecond=101000) - - def __enter__(self): - return self.span - - def __exit__(self, type, value, traceback): - pass + freezer.stop() with mock.patch( "sentry_sdk.integrations.sqlalchemy.record_sql_queries", @@ -687,7 +681,7 @@ def test_span_origin(sentry_init, capture_events): engine = create_engine( "sqlite:///:memory:", connect_args={"check_same_thread": False} ) - with start_transaction(name="foo"): + with sentry_sdk.start_span(name="foo"): with engine.connect() as con: con.execute(text("SELECT 0")) diff --git a/tests/integrations/starlette/test_starlette.py b/tests/integrations/starlette/test_starlette.py index bc445bf8f2..bf89729b35 100644 --- a/tests/integrations/starlette/test_starlette.py +++ b/tests/integrations/starlette/test_starlette.py @@ -13,11 +13,13 @@ from sentry_sdk import capture_message, get_baggage, get_traceparent from sentry_sdk.integrations.asgi import SentryAsgiMiddleware +from sentry_sdk.integrations.logging import LoggingIntegration from sentry_sdk.integrations.starlette import ( StarletteIntegration, StarletteRequestExtractor, ) from sentry_sdk.utils import parse_version +from tests.conftest import ApproxDict import starlette from starlette.authentication import ( @@ -666,9 +668,9 @@ def test_middleware_spans(sentry_init, capture_events): "AuthenticationMiddleware", "ExceptionMiddleware", "AuthenticationMiddleware", # 'op': 'middleware.starlette.send' - "ServerErrorMiddleware", # 'op': 'middleware.starlette.send' "AuthenticationMiddleware", # 'op': 'middleware.starlette.send' "ServerErrorMiddleware", # 'op': 'middleware.starlette.send' + "ServerErrorMiddleware", # 'op': 'middleware.starlette.send' ] assert len(transaction_event["spans"]) == len(expected_middleware_spans) @@ -743,23 +745,23 @@ def test_middleware_callback_spans(sentry_init, capture_events): }, { "op": "middleware.starlette.send", - "description": "ServerErrorMiddleware.__call__.<locals>._send", - "tags": {"starlette.middleware_name": "SampleMiddleware"}, + "description": "SampleMiddleware.__call__.<locals>.do_stuff", + "tags": {"starlette.middleware_name": "ExceptionMiddleware"}, }, { "op": "middleware.starlette.send", - "description": "SentryAsgiMiddleware._run_app.<locals>._sentry_wrapped_send", - "tags": {"starlette.middleware_name": "ServerErrorMiddleware"}, + "description": "ServerErrorMiddleware.__call__.<locals>._send", + "tags": {"starlette.middleware_name": "SampleMiddleware"}, }, { "op": "middleware.starlette.send", - "description":
"SampleMiddleware.__call__..do_stuff", - "tags": {"starlette.middleware_name": "ExceptionMiddleware"}, + "description": "ServerErrorMiddleware.__call__.._send", + "tags": {"starlette.middleware_name": "SampleMiddleware"}, }, { "op": "middleware.starlette.send", - "description": "ServerErrorMiddleware.__call__.._send", - "tags": {"starlette.middleware_name": "SampleMiddleware"}, + "description": "SentryAsgiMiddleware._run_app.._sentry_wrapped_send", + "tags": {"starlette.middleware_name": "ServerErrorMiddleware"}, }, { "op": "middleware.starlette.send", @@ -772,7 +774,7 @@ def test_middleware_callback_spans(sentry_init, capture_events): for span in transaction_event["spans"]: assert span["op"] == expected[idx]["op"] assert span["description"] == expected[idx]["description"] - assert span["tags"] == expected[idx]["tags"] + assert span["tags"] == ApproxDict(expected[idx]["tags"]) idx += 1 @@ -837,16 +839,16 @@ def test_middleware_partial_receive_send(sentry_init, capture_events): "description": "ServerErrorMiddleware.__call__.._send", "tags": {"starlette.middleware_name": "SamplePartialReceiveSendMiddleware"}, }, - { - "op": "middleware.starlette.send", - "description": "SentryAsgiMiddleware._run_app.._sentry_wrapped_send", - "tags": {"starlette.middleware_name": "ServerErrorMiddleware"}, - }, { "op": "middleware.starlette", "description": "ExceptionMiddleware", "tags": {"starlette.middleware_name": "ExceptionMiddleware"}, }, + { + "op": "middleware.starlette.send", + "description": "SentryAsgiMiddleware._run_app.._sentry_wrapped_send", + "tags": {"starlette.middleware_name": "ServerErrorMiddleware"}, + }, { "op": "middleware.starlette.send", "description": "functools.partial(.my_send at ", @@ -863,7 +865,7 @@ def test_middleware_partial_receive_send(sentry_init, capture_events): for span in transaction_event["spans"]: assert span["op"] == expected[idx]["op"] assert span["description"].startswith(expected[idx]["description"]) - assert span["tags"] == expected[idx]["tags"] + assert span["tags"] == ApproxDict(expected[idx]["tags"]) idx += 1 @@ -909,13 +911,13 @@ def test_active_thread_id(sentry_init, capture_envelopes, teardown_profiling, en sentry_init( traces_sample_rate=1.0, profiles_sample_rate=1.0, + integrations=[StarletteIntegration()], ) app = starlette_app_factory() - asgi_app = SentryAsgiMiddleware(app) envelopes = capture_envelopes() - client = TestClient(asgi_app) + client = TestClient(app) response = client.get(endpoint) assert response.status_code == 200 @@ -942,7 +944,9 @@ def test_active_thread_id(sentry_init, capture_envelopes, teardown_profiling, en def test_original_request_not_scrubbed(sentry_init, capture_events): - sentry_init(integrations=[StarletteIntegration()]) + sentry_init( + integrations=[StarletteIntegration(), LoggingIntegration(event_level="ERROR")] + ) events = capture_events() @@ -1181,82 +1185,6 @@ def test_span_origin(sentry_init, capture_events): assert span["origin"] == "auto.http.starlette" -class NonIterableContainer: - """Wraps any container and makes it non-iterable. - - Used to test backwards compatibility with our old way of defining failed_request_status_codes, which allowed - passing in a list of (possibly non-iterable) containers. The Python standard library does not provide any built-in - non-iterable containers, so we have to define our own. 
- """ - - def __init__(self, inner): - self.inner = inner - - def __contains__(self, item): - return item in self.inner - - -parametrize_test_configurable_status_codes_deprecated = pytest.mark.parametrize( - "failed_request_status_codes,status_code,expected_error", - [ - (None, 500, True), - (None, 400, False), - ([500, 501], 500, True), - ([500, 501], 401, False), - ([range(400, 499)], 401, True), - ([range(400, 499)], 500, False), - ([range(400, 499), range(500, 599)], 300, False), - ([range(400, 499), range(500, 599)], 403, True), - ([range(400, 499), range(500, 599)], 503, True), - ([range(400, 403), 500, 501], 401, True), - ([range(400, 403), 500, 501], 405, False), - ([range(400, 403), 500, 501], 501, True), - ([range(400, 403), 500, 501], 503, False), - ([], 500, False), - ([NonIterableContainer(range(500, 600))], 500, True), - ([NonIterableContainer(range(500, 600))], 404, False), - ], -) -"""Test cases for configurable status codes (deprecated API). -Also used by the FastAPI tests. -""" - - -@parametrize_test_configurable_status_codes_deprecated -def test_configurable_status_codes_deprecated( - sentry_init, - capture_events, - failed_request_status_codes, - status_code, - expected_error, -): - with pytest.warns(DeprecationWarning): - starlette_integration = StarletteIntegration( - failed_request_status_codes=failed_request_status_codes - ) - - sentry_init(integrations=[starlette_integration]) - - events = capture_events() - - async def _error(request): - raise HTTPException(status_code) - - app = starlette.applications.Starlette( - routes=[ - starlette.routing.Route("/error", _error, methods=["GET"]), - ], - ) - - client = TestClient(app) - client.get("/error") - - if expected_error: - assert len(events) == 1 - else: - assert not events - - @pytest.mark.skipif( STARLETTE_VERSION < (0, 21), reason="Requires Starlette >= 0.21, because earlier versions do not support HTTP 'HEAD' requests", @@ -1267,9 +1195,8 @@ def test_transaction_http_method_default(sentry_init, capture_events): """ sentry_init( traces_sample_rate=1.0, - integrations=[ - StarletteIntegration(), - ], + auto_enabling_integrations=False, # Make sure that httpx integration is not added, because it adds tracing information to the starlette test clients request. + integrations=[StarletteIntegration()], ) events = capture_events() @@ -1294,6 +1221,7 @@ def test_transaction_http_method_default(sentry_init, capture_events): def test_transaction_http_method_custom(sentry_init, capture_events): sentry_init( traces_sample_rate=1.0, + auto_enabling_integrations=False, # Make sure that httpx integration is not added, because it adds tracing information to the starlette test clients request. 
integrations=[ StarletteIntegration( http_methods_to_capture=( diff --git a/tests/integrations/starlite/test_starlite.py b/tests/integrations/starlite/test_starlite.py index 2c3aa704f5..79fa755608 100644 --- a/tests/integrations/starlite/test_starlite.py +++ b/tests/integrations/starlite/test_starlite.py @@ -5,6 +5,7 @@ from sentry_sdk import capture_message from sentry_sdk.integrations.starlite import StarliteIntegration +from tests.conftest import ApproxDict from typing import Any, Dict @@ -199,7 +200,7 @@ def is_matching_span(expected_span, actual_span): return ( expected_span["op"] == actual_span["op"] and expected_span["description"] == actual_span["description"] - and expected_span["tags"] == actual_span["tags"] + and ApproxDict(expected_span["tags"]) == actual_span["tags"] ) actual_starlite_spans = list( @@ -295,7 +296,7 @@ def is_matching_span(expected_span, actual_span): return ( expected_span["op"] == actual_span["op"] and actual_span["description"].startswith(expected_span["description"]) - and expected_span["tags"] == actual_span["tags"] + and ApproxDict(expected_span["tags"]) == actual_span["tags"] ) actual_starlite_spans = list( diff --git a/tests/integrations/stdlib/test_httplib.py b/tests/integrations/stdlib/test_httplib.py index f6735d0e74..44e2cb5265 100644 --- a/tests/integrations/stdlib/test_httplib.py +++ b/tests/integrations/stdlib/test_httplib.py @@ -6,16 +6,40 @@ import pytest -from sentry_sdk import capture_message, start_transaction +from sentry_sdk import capture_message, start_span, continue_trace, isolation_scope from sentry_sdk.consts import MATCH_ALL, SPANDATA -from sentry_sdk.tracing import Transaction from sentry_sdk.integrations.stdlib import StdlibIntegration -from tests.conftest import ApproxDict, create_mock_http_server +from tests.conftest import ApproxDict, SortedBaggage, create_mock_http_server PORT = create_mock_http_server() +@pytest.fixture +def capture_request_headers(monkeypatch): + """ + HTTPConnection.send is passed a string containing (among other things) + the headers on the request. Mock it so we can check the headers. 
+ """ + + def inner(do_send=True): + request_headers = {} + old_send = HTTPConnection.send + + def patched_send(self, data): + for line in data.decode("utf-8").split("\r\n")[1:]: + if line: + key, val = line.split(": ") + request_headers[key] = val + if do_send: + old_send(self, data) + + monkeypatch.setattr(HTTPConnection, "send", patched_send) + return request_headers + + return inner + + def test_crumb_capture(sentry_init, capture_events): sentry_init(integrations=[StdlibIntegration()]) events = capture_events() @@ -45,8 +69,8 @@ def test_crumb_capture(sentry_init, capture_events): @pytest.mark.parametrize( "status_code,level", [ - (200, None), - (301, None), + (200, "info"), + (301, "info"), (403, "warning"), (405, "warning"), (500, "error"), @@ -69,12 +93,7 @@ def test_crumb_capture_client_error(sentry_init, capture_events, status_code, le assert crumb["type"] == "http" assert crumb["category"] == "httplib" - - if level is None: - assert "level" not in crumb - else: - assert crumb["level"] == level - + assert crumb["level"] == level assert crumb["data"] == ApproxDict( { "url": url, @@ -123,7 +142,7 @@ def test_empty_realurl(sentry_init): """ sentry_init(dsn="") - HTTPConnection("localhost", port=PORT).putrequest("POST", None) + HTTPConnection("localhost", PORT).putrequest("POST", None) def test_httplib_misuse(sentry_init, capture_events, request): @@ -175,98 +194,89 @@ def test_httplib_misuse(sentry_init, capture_events, request): ) -def test_outgoing_trace_headers(sentry_init, monkeypatch): - # HTTPSConnection.send is passed a string containing (among other things) - # the headers on the request. Mock it so we can check the headers, and also - # so it doesn't try to actually talk to the internet. - mock_send = mock.Mock() - monkeypatch.setattr(HTTPSConnection, "send", mock_send) - +def test_outgoing_trace_headers( + sentry_init, capture_envelopes, capture_request_headers +): sentry_init(traces_sample_rate=1.0) + envelopes = capture_envelopes() + request_headers = capture_request_headers() headers = { + "sentry-trace": "771a43a4192642f0b136d5159a501700-1234567890abcdef-1", "baggage": ( "other-vendor-value-1=foo;bar;baz, sentry-trace_id=771a43a4192642f0b136d5159a501700, " "sentry-public_key=49d0f7386ad645858ae85020e393bef3, sentry-sample_rate=0.01337, " - "sentry-user_id=Am%C3%A9lie, sentry-sample_rand=0.132521102938283, other-vendor-value-2=foo;bar;" + "sentry-user_id=Am%C3%A9lie, sentry-sample_rand=0.003370, other-vendor-value-2=foo;bar;" ), } - transaction = Transaction.continue_from_headers(headers) + with isolation_scope(): + with continue_trace(headers): + with start_span(name="/interactions/other-dogs/new-dog"): + conn = HTTPConnection("localhost", PORT) + conn.request("GET", "/top-chasers") + conn.getresponse() + + (envelope,) = envelopes + transaction = envelope.get_transaction_event() + request_span = transaction["spans"][-1] + + expected_sentry_trace = "{trace_id}-{parent_span_id}-{sampled}".format( + trace_id=transaction["contexts"]["trace"]["trace_id"], + parent_span_id=request_span["span_id"], + sampled=1, + ) + assert request_headers["sentry-trace"] == expected_sentry_trace + + expected_outgoing_baggage = ( + "sentry-trace_id=771a43a4192642f0b136d5159a501700," + "sentry-public_key=49d0f7386ad645858ae85020e393bef3," + "sentry-sample_rate=0.01337," + "sentry-user_id=Am%C3%A9lie," + "sentry-sample_rand=0.003370," + "sentry-sampled=true" + ) - with start_transaction( - transaction=transaction, - name="/interactions/other-dogs/new-dog", - op="greeting.sniff", - 
trace_id="12312012123120121231201212312012", - ) as transaction: - HTTPSConnection("www.squirrelchasers.com").request("GET", "/top-chasers") + assert request_headers["baggage"] == SortedBaggage(expected_outgoing_baggage) - (request_str,) = mock_send.call_args[0] - request_headers = {} - for line in request_str.decode("utf-8").split("\r\n")[1:]: - if line: - key, val = line.split(": ") - request_headers[key] = val - - request_span = transaction._span_recorder.spans[-1] - expected_sentry_trace = "{trace_id}-{parent_span_id}-{sampled}".format( - trace_id=transaction.trace_id, - parent_span_id=request_span.span_id, - sampled=1, - ) - assert request_headers["sentry-trace"] == expected_sentry_trace - - expected_outgoing_baggage = ( - "sentry-trace_id=771a43a4192642f0b136d5159a501700," - "sentry-public_key=49d0f7386ad645858ae85020e393bef3," - "sentry-sample_rate=1.0," - "sentry-user_id=Am%C3%A9lie," - "sentry-sample_rand=0.132521102938283" - ) - - assert request_headers["baggage"] == expected_outgoing_baggage - - -def test_outgoing_trace_headers_head_sdk(sentry_init, monkeypatch): - # HTTPSConnection.send is passed a string containing (among other things) - # the headers on the request. Mock it so we can check the headers, and also - # so it doesn't try to actually talk to the internet. - mock_send = mock.Mock() - monkeypatch.setattr(HTTPSConnection, "send", mock_send) +def test_outgoing_trace_headers_head_sdk( + sentry_init, capture_request_headers, capture_envelopes +): sentry_init(traces_sample_rate=0.5, release="foo") - with mock.patch("sentry_sdk.tracing_utils.Random.uniform", return_value=0.25): - transaction = Transaction.continue_from_headers({}) + envelopes = capture_envelopes() + request_headers = capture_request_headers() - with start_transaction(transaction=transaction, name="Head SDK tx") as transaction: - HTTPSConnection("www.squirrelchasers.com").request("GET", "/top-chasers") + with mock.patch("sentry_sdk.tracing_utils.Random.uniform", return_value=0.25): + with isolation_scope(): + with continue_trace({}): + with start_span(name="Head SDK tx") as root_span: + conn = HTTPConnection("localhost", PORT) + conn.request("GET", "/top-chasers") + conn.getresponse() + + (envelope,) = envelopes + transaction = envelope.get_transaction_event() + request_span = transaction["spans"][-1] + + expected_sentry_trace = "{trace_id}-{parent_span_id}-{sampled}".format( + trace_id=transaction["contexts"]["trace"]["trace_id"], + parent_span_id=request_span["span_id"], + sampled=1, + ) + assert request_headers["sentry-trace"] == expected_sentry_trace + + expected_outgoing_baggage = ( + f"sentry-trace_id={root_span.trace_id}," # noqa: E231 + "sentry-sample_rand=0.250000," + "sentry-environment=production," + "sentry-release=foo," + "sentry-sample_rate=0.5," + "sentry-sampled=true," + "sentry-transaction=Head%20SDK%20tx" + ) - (request_str,) = mock_send.call_args[0] - request_headers = {} - for line in request_str.decode("utf-8").split("\r\n")[1:]: - if line: - key, val = line.split(": ") - request_headers[key] = val - - request_span = transaction._span_recorder.spans[-1] - expected_sentry_trace = "{trace_id}-{parent_span_id}-{sampled}".format( - trace_id=transaction.trace_id, - parent_span_id=request_span.span_id, - sampled=1, - ) - assert request_headers["sentry-trace"] == expected_sentry_trace - - expected_outgoing_baggage = ( - "sentry-trace_id=%s," - "sentry-sample_rand=0.250000," - "sentry-environment=production," - "sentry-release=foo," - "sentry-sample_rate=0.5," - "sentry-sampled=%s" - ) % 
(transaction.trace_id, "true" if transaction.sampled else "false") - - assert request_headers["baggage"] == expected_outgoing_baggage + assert request_headers["baggage"] == SortedBaggage(expected_outgoing_baggage) @pytest.mark.parametrize( @@ -329,42 +339,23 @@ def test_outgoing_trace_headers_head_sdk(sentry_init, monkeypatch): ], ) def test_option_trace_propagation_targets( - sentry_init, monkeypatch, trace_propagation_targets, host, path, trace_propagated + sentry_init, + capture_request_headers, + trace_propagation_targets, + host, + path, + trace_propagated, ): - # HTTPSConnection.send is passed a string containing (among other things) - # the headers on the request. Mock it so we can check the headers, and also - # so it doesn't try to actually talk to the internet. - mock_send = mock.Mock() - monkeypatch.setattr(HTTPSConnection, "send", mock_send) - sentry_init( trace_propagation_targets=trace_propagation_targets, traces_sample_rate=1.0, ) - headers = { - "baggage": ( - "sentry-trace_id=771a43a4192642f0b136d5159a501700, " - "sentry-public_key=49d0f7386ad645858ae85020e393bef3, sentry-sample_rate=0.01337, " - ) - } - - transaction = Transaction.continue_from_headers(headers) + request_headers = capture_request_headers(do_send=False) - with start_transaction( - transaction=transaction, - name="/interactions/other-dogs/new-dog", - op="greeting.sniff", - trace_id="12312012123120121231201212312012", - ) as transaction: + with start_span(name="foo"): HTTPSConnection(host).request("GET", path) - - (request_str,) = mock_send.call_args[0] - request_headers = {} - for line in request_str.decode("utf-8").split("\r\n")[1:]: - if line: - key, val = line.split(": ") - request_headers[key] = val + # don't invoke getresponse to avoid actual network traffic if trace_propagated: assert "sentry-trace" in request_headers @@ -378,8 +369,8 @@ def test_span_origin(sentry_init, capture_events): sentry_init(traces_sample_rate=1.0, debug=True) events = capture_events() - with start_transaction(name="foo"): - conn = HTTPConnection("localhost", port=PORT) + with start_span(name="foo"): + conn = HTTPConnection("localhost", PORT) conn.request("GET", "/foo") conn.getresponse() @@ -399,7 +390,7 @@ def test_http_timeout(monkeypatch, sentry_init, capture_envelopes): envelopes = capture_envelopes() with pytest.raises(TimeoutError): - with start_transaction(op="op", name="name"): + with start_span(op="op", name="name"): conn = HTTPConnection("localhost", port=PORT) conn.request("GET", "/bla") conn.getresponse() diff --git a/tests/integrations/stdlib/test_subprocess.py b/tests/integrations/stdlib/test_subprocess.py index 593ef8a0dc..8e3166e512 100644 --- a/tests/integrations/stdlib/test_subprocess.py +++ b/tests/integrations/stdlib/test_subprocess.py @@ -3,10 +3,11 @@ import subprocess import sys from collections.abc import Mapping +from unittest import mock import pytest -from sentry_sdk import capture_message, start_transaction +from sentry_sdk import capture_exception, capture_message, start_span from sentry_sdk.integrations.stdlib import StdlibIntegration from tests.conftest import ApproxDict @@ -58,7 +59,7 @@ def test_subprocess_basic( sentry_init(integrations=[StdlibIntegration()], traces_sample_rate=1.0) events = capture_events() - with start_transaction(name="foo") as transaction: + with start_span(name="foo") as span: args = [ sys.executable, "-c", @@ -109,7 +110,7 @@ def test_subprocess_basic( assert os.environ == old_environ - assert transaction.trace_id in str(output) + assert span.trace_id in str(output) 
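The parent's trace id shows up in the child's printed environment because the Stdlib integration copies the outgoing tracing headers into environment variables before spawning the subprocess. A child process that should join the same trace can feed those values back into the SDK, roughly like the sketch below; the SUBPROCESS_SENTRY_TRACE and SUBPROCESS_BAGGAGE names are an assumption about the injected variables, not something this diff asserts:

import os

import sentry_sdk
from sentry_sdk import continue_trace, start_span

sentry_sdk.init(traces_sample_rate=1.0)

# Collect whatever propagation material the parent process injected.
incoming_headers = {
    header: value
    for header, value in {
        "sentry-trace": os.environ.get("SUBPROCESS_SENTRY_TRACE"),
        "baggage": os.environ.get("SUBPROCESS_BAGGAGE"),
    }.items()
    if value is not None
}

# The child's root span then continues the parent's trace instead of
# starting an unrelated one.
with continue_trace(incoming_headers):
    with start_span(op="child", name="child-work"):
        pass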
capture_message("hi") @@ -177,7 +178,7 @@ def test_subprocess_basic( def test_subprocess_empty_env(sentry_init, monkeypatch): monkeypatch.setenv("TEST_MARKER", "should_not_be_seen") sentry_init(integrations=[StdlibIntegration()], traces_sample_rate=1.0) - with start_transaction(name="foo"): + with start_span(name="foo"): args = [ sys.executable, "-c", @@ -200,7 +201,7 @@ def test_subprocess_span_origin(sentry_init, capture_events): sentry_init(integrations=[StdlibIntegration()], traces_sample_rate=1.0) events = capture_events() - with start_transaction(name="foo"): + with start_span(name="foo"): args = [ sys.executable, "-c", @@ -224,3 +225,37 @@ def test_subprocess_span_origin(sentry_init, capture_events): assert event["spans"][2]["op"] == "subprocess.wait" assert event["spans"][2]["origin"] == "auto.subprocess.stdlib.subprocess" + + +def test_subprocess_breadcrumb(sentry_init, capture_events): + sentry_init() + events = capture_events() + + args = [ + sys.executable, + "-c", + "print('hello world')", + ] + popen = subprocess.Popen(args) + popen.communicate() + popen.poll() + + try: + 1 / 0 + except ZeroDivisionError as ex: + capture_exception(ex) + + (event,) = events + breadcrumbs = event["breadcrumbs"]["values"] + assert len(breadcrumbs) == 1 + + (crumb,) = breadcrumbs + assert crumb["type"] == "subprocess" + assert crumb["category"] == "subprocess" + assert crumb["message"] == " ".join(args) + assert crumb["timestamp"] == mock.ANY + assert crumb["data"] == { + "subprocess.pid": popen.pid, + "thread.id": mock.ANY, + "thread.name": mock.ANY, + } diff --git a/tests/integrations/strawberry/test_strawberry.py b/tests/integrations/strawberry/test_strawberry.py index 7b40b238d2..d1774aeca5 100644 --- a/tests/integrations/strawberry/test_strawberry.py +++ b/tests/integrations/strawberry/test_strawberry.py @@ -204,7 +204,9 @@ def test_capture_request_if_available_and_send_pii_is_on( (error_event,) = events - assert error_event["exception"]["values"][0]["mechanism"]["type"] == "strawberry" + assert len(error_event["exception"]["values"]) == 2 + assert error_event["exception"]["values"][0]["mechanism"]["type"] == "chained" + assert error_event["exception"]["values"][-1]["mechanism"]["type"] == "strawberry" assert error_event["request"]["api_target"] == "graphql" assert error_event["request"]["data"] == { "query": query, @@ -258,7 +260,10 @@ def test_do_not_capture_request_if_send_pii_is_off( assert len(events) == 1 (error_event,) = events - assert error_event["exception"]["values"][0]["mechanism"]["type"] == "strawberry" + + assert len(error_event["exception"]["values"]) == 2 + assert error_event["exception"]["values"][0]["mechanism"]["type"] == "chained" + assert error_event["exception"]["values"][-1]["mechanism"]["type"] == "strawberry" assert "data" not in error_event["request"] assert "response" not in error_event["contexts"] @@ -505,7 +510,7 @@ def test_transaction_no_operation_name( query_span = query_spans[0] assert query_span["description"] == "query" assert query_span["data"]["graphql.operation.type"] == "query" - assert query_span["data"]["graphql.operation.name"] is None + assert "graphql.operation.name" not in query_span["data"] assert query_span["data"]["graphql.document"] == query assert query_span["data"]["graphql.resource_name"] @@ -582,7 +587,7 @@ def test_transaction_mutation( query_span = query_spans[0] assert query_span["description"] == "mutation" assert query_span["data"]["graphql.operation.type"] == "mutation" - assert query_span["data"]["graphql.operation.name"] is None + 
assert query_span["data"]["graphql.operation.name"] == "Change" assert query_span["data"]["graphql.document"] == query assert query_span["data"]["graphql.resource_name"] diff --git a/tests/integrations/threading/test_threading.py b/tests/integrations/threading/test_threading.py index 4577c846d8..6015ce9ca8 100644 --- a/tests/integrations/threading/test_threading.py +++ b/tests/integrations/threading/test_threading.py @@ -2,6 +2,7 @@ from concurrent import futures from textwrap import dedent from threading import Thread +import sys import pytest @@ -38,11 +39,11 @@ def crash(): @pytest.mark.filterwarnings("ignore:.*:pytest.PytestUnhandledThreadExceptionWarning") -@pytest.mark.parametrize("propagate_hub", (True, False)) -def test_propagates_hub(sentry_init, capture_events, propagate_hub): +@pytest.mark.parametrize("propagate_scope", (True, False)) +def test_propagates_scope(sentry_init, capture_events, propagate_scope): sentry_init( default_integrations=False, - integrations=[ThreadingIntegration(propagate_hub=propagate_hub)], + integrations=[ThreadingIntegration(propagate_scope=propagate_scope)], ) events = capture_events() @@ -68,25 +69,27 @@ def stage2(): assert exception["mechanism"]["type"] == "threading" assert not exception["mechanism"]["handled"] - if propagate_hub: + if propagate_scope: assert event["tags"]["stage1"] == "true" else: assert "stage1" not in event.get("tags", {}) -@pytest.mark.parametrize("propagate_hub", (True, False)) -def test_propagates_threadpool_hub(sentry_init, capture_events, propagate_hub): +@pytest.mark.parametrize("propagate_scope", (True, False)) +def test_propagates_threadpool_scope(sentry_init, capture_events, propagate_scope): sentry_init( traces_sample_rate=1.0, - integrations=[ThreadingIntegration(propagate_hub=propagate_hub)], + integrations=[ThreadingIntegration(propagate_scope=propagate_scope)], ) events = capture_events() def double(number): - with sentry_sdk.start_span(op="task", name=str(number)): + with sentry_sdk.start_span( + op="task", name=str(number), only_as_child_span=True + ): return number * 2 - with sentry_sdk.start_transaction(name="test_handles_threadpool"): + with sentry_sdk.start_span(name="test_handles_threadpool"): with futures.ThreadPoolExecutor(max_workers=1) as executor: tasks = [executor.submit(double, number) for number in [1, 2, 3, 4]] for future in futures.as_completed(tasks): @@ -94,7 +97,7 @@ def double(number): sentry_sdk.flush() - if propagate_hub: + if propagate_scope: assert len(events) == 1 (event,) = events assert event["spans"][0]["trace_id"] == event["spans"][1]["trace_id"] @@ -106,14 +109,15 @@ def double(number): assert len(event["spans"]) == 0 -@pytest.mark.skip(reason="Temporarily disable to release SDK 2.0a1.") +@pytest.mark.skipif(sys.version[:3] == "3.8", reason="Fails in CI on 3.8") def test_circular_references(sentry_init, request): sentry_init(default_integrations=False, integrations=[ThreadingIntegration()]) - gc.collect() gc.disable() request.addfinalizer(gc.enable) + gc.collect() + class MyThread(Thread): def run(self): pass @@ -235,8 +239,8 @@ def do_some_work(number): threads = [] - with sentry_sdk.start_transaction(op="outer-trx"): - for number in range(5): + with sentry_sdk.start_span(op="outer-trx"): + for number in range(2): with sentry_sdk.start_span( op=f"outer-submit-{number}", name="Thread: main" ): @@ -247,32 +251,44 @@ def do_some_work(number): for t in threads: t.join() - (event,) = events if propagate_scope: + # The children spans from the threads become parts of the existing span + # 
tree since we propagated the scope + assert len(events) == 1 + (event,) = events + assert render_span_tree(event) == dedent( """\ - op="outer-trx": description=null - op="outer-submit-0": description="Thread: main" - op="inner-run-0": description="Thread: child-0" - op="outer-submit-1": description="Thread: main" - - op="inner-run-1": description="Thread: child-1" - - op="outer-submit-2": description="Thread: main" - - op="inner-run-2": description="Thread: child-2" - - op="outer-submit-3": description="Thread: main" - - op="inner-run-3": description="Thread: child-3" - - op="outer-submit-4": description="Thread: main" - - op="inner-run-4": description="Thread: child-4"\ + - op="inner-run-1": description="Thread: child-1"\ """ ) elif not propagate_scope: - assert render_span_tree(event) == dedent( + # The spans from the threads become their own root spans/transactions + # as the connection to the parent span was severed when the scope was + # cleared + assert len(events) == 3 + (event1, event2, event3) = sorted(events, key=render_span_tree) + + assert render_span_tree(event1) == dedent( + """\ + - op="inner-run-0": description=null\ +""" + ) + assert render_span_tree(event2) == dedent( + """\ + - op="inner-run-1": description=null\ +""" + ) + + assert render_span_tree(event3) == dedent( """\ - op="outer-trx": description=null - op="outer-submit-0": description="Thread: main" - - op="outer-submit-1": description="Thread: main" - - op="outer-submit-2": description="Thread: main" - - op="outer-submit-3": description="Thread: main" - - op="outer-submit-4": description="Thread: main"\ + - op="outer-submit-1": description="Thread: main"\ """ ) diff --git a/tests/integrations/tornado/test_tornado.py b/tests/integrations/tornado/test_tornado.py index 294f605f6a..e5dae3fcd9 100644 --- a/tests/integrations/tornado/test_tornado.py +++ b/tests/integrations/tornado/test_tornado.py @@ -1,9 +1,10 @@ import json +import re import pytest import sentry_sdk -from sentry_sdk import start_transaction, capture_message +from sentry_sdk import start_span, capture_message from sentry_sdk.integrations.tornado import TornadoIntegration from tornado.web import RequestHandler, Application, HTTPError @@ -116,7 +117,7 @@ def test_transactions(tornado_testcase, sentry_init, capture_events, handler, co events = capture_events() client = tornado_testcase(Application([(r"/hi", handler)])) - with start_transaction(name="client") as span: + with start_span(name="client") as span: pass response = client.fetch( @@ -134,7 +135,7 @@ def test_transactions(tornado_testcase, sentry_init, capture_events, handler, co assert client_tx["transaction"] == "client" assert client_tx["transaction_info"] == { "source": "custom" - } # because this is just the start_transaction() above. + } # because this is just the start_span() above. 
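The "custom" source above is worth a quick standalone illustration: any span whose name is supplied by the caller is reported with source "custom", while integration-created transactions (such as Tornado's, named after the handler) carry an integration-assigned source. A minimal check, assuming the repo's sentry_init/capture_events fixtures:

import sentry_sdk


def test_manually_started_span_has_custom_source(sentry_init, capture_events):
    sentry_init(traces_sample_rate=1.0)
    events = capture_events()

    with sentry_sdk.start_span(name="client"):
        pass

    (event,) = events
    assert event["transaction"] == "client"
    # Manually supplied names are always reported as "custom".
    assert event["transaction_info"] == {"source": "custom"}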
if server_error is not None: assert server_error["exception"]["values"][0]["type"] == "ZeroDivisionError" @@ -450,3 +451,30 @@ def test_span_origin(tornado_testcase, sentry_init, capture_events): (_, event) = events assert event["contexts"]["trace"]["origin"] == "auto.http.tornado" + + +def test_attributes_in_traces_sampler(tornado_testcase, sentry_init): + def traces_sampler(sampling_context): + assert sampling_context["url.query"] == "foo=bar" + assert sampling_context["url.path"] == "/hi" + assert sampling_context["url.scheme"] == "http" + assert re.match( + r"http:\/\/127\.0\.0\.1:[0-9]{4,5}\/hi\?foo=bar", + sampling_context["url.full"], + ) + assert sampling_context["http.request.method"] == "GET" + assert sampling_context["server.address"] == "127.0.0.1" + assert sampling_context["server.port"].isnumeric() + assert sampling_context["network.protocol.name"] == "HTTP" + assert sampling_context["network.protocol.version"] == "1.1" + assert sampling_context["http.request.header.custom-header"] == "Custom Value" + + return True + + sentry_init( + integrations=[TornadoIntegration], + traces_sampler=traces_sampler, + ) + + client = tornado_testcase(Application([(r"/hi", HelloHandler)])) + client.fetch("/hi?foo=bar", headers={"Custom-Header": "Custom Value"}) diff --git a/tests/integrations/wsgi/test_wsgi.py b/tests/integrations/wsgi/test_wsgi.py index 656fc1757f..76c80f6c6a 100644 --- a/tests/integrations/wsgi/test_wsgi.py +++ b/tests/integrations/wsgi/test_wsgi.py @@ -40,7 +40,7 @@ def next(self): def test_basic(sentry_init, crashing_app, capture_events): - sentry_init(send_default_pii=True) + sentry_init(send_default_pii=True, debug=True) app = SentryWsgiMiddleware(crashing_app) client = Client(app) events = capture_events() @@ -141,7 +141,7 @@ def test_transaction_with_error( def dogpark(environ, start_response): raise ValueError("Fetch aborted. The ball was not returned.") - sentry_init(send_default_pii=True, traces_sample_rate=1.0) + sentry_init(send_default_pii=True, traces_sample_rate=1.0, debug=True) app = SentryWsgiMiddleware(dogpark) client = Client(app) events = capture_events() @@ -326,33 +326,27 @@ def dogpark(environ, start_response): assert error_event["contexts"]["trace"]["trace_id"] == trace_id -def test_traces_sampler_gets_correct_values_in_sampling_context( - sentry_init, - DictionaryContaining, # noqa:N803 -): +def test_traces_sampler_gets_correct_values_in_sampling_context(sentry_init): def app(environ, start_response): start_response("200 OK", []) return ["Go get the ball! 
Good dog!"] - traces_sampler = mock.Mock(return_value=True) + def traces_sampler(sampling_context): + assert sampling_context["http.request.method"] == "GET" + assert sampling_context["url.path"] == "/dogs/are/great/" + assert sampling_context["url.query"] == "cats=too" + assert sampling_context["url.scheme"] == "http" + assert ( + sampling_context["url.full"] == "http://localhost/dogs/are/great/?cats=too" + ) + assert sampling_context["http.request.header.custom-header"] == "Custom Value" + return True + sentry_init(send_default_pii=True, traces_sampler=traces_sampler) app = SentryWsgiMiddleware(app) client = Client(app) - client.get("/dogs/are/great/") - - traces_sampler.assert_any_call( - DictionaryContaining( - { - "wsgi_environ": DictionaryContaining( - { - "PATH_INFO": "/dogs/are/great/", - "REQUEST_METHOD": "GET", - }, - ), - } - ) - ) + client.get("/dogs/are/great/?cats=too", headers={"Custom-Header": "Custom Value"}) def test_session_mode_defaults_to_request_mode_in_wsgi_handler( @@ -443,7 +437,7 @@ def test_app(environ, start_response): sentry_init( traces_sample_rate=1.0, - _experiments={"profiles_sample_rate": 1.0}, + profiles_sample_rate=1.0, ) app = SentryWsgiMiddleware(test_app) envelopes = capture_envelopes() diff --git a/tests/new_scopes_compat/__init__.py b/tests/new_scopes_compat/__init__.py deleted file mode 100644 index 45391bd9ad..0000000000 --- a/tests/new_scopes_compat/__init__.py +++ /dev/null @@ -1,7 +0,0 @@ -""" -Separate module for tests that check backwards compatibility of the Hub API with 1.x. -These tests should be removed once we remove the Hub API, likely in the next major. - -All tests in this module are run with hub isolation, provided by `isolate_hub` autouse -fixture, defined in `conftest.py`. -""" diff --git a/tests/new_scopes_compat/conftest.py b/tests/new_scopes_compat/conftest.py deleted file mode 100644 index 9f16898dea..0000000000 --- a/tests/new_scopes_compat/conftest.py +++ /dev/null @@ -1,8 +0,0 @@ -import pytest -import sentry_sdk - - -@pytest.fixture(autouse=True) -def isolate_hub(suppress_deprecation_warnings): - with sentry_sdk.Hub(None): - yield diff --git a/tests/new_scopes_compat/test_new_scopes_compat.py b/tests/new_scopes_compat/test_new_scopes_compat.py deleted file mode 100644 index 21e2ac27d3..0000000000 --- a/tests/new_scopes_compat/test_new_scopes_compat.py +++ /dev/null @@ -1,275 +0,0 @@ -import sentry_sdk -from sentry_sdk.hub import Hub - -""" -Those tests are meant to check the compatibility of the new scopes in SDK 2.0 with the old Hub/Scope system in SDK 1.x. - -Those tests have been run with the latest SDK 1.x versiona and the data used in the `assert` statements represents -the behvaior of the SDK 1.x. - -This makes sure that we are backwards compatible. (on a best effort basis, there will probably be some edge cases that are not covered here) -""" - - -def test_configure_scope_sdk1(sentry_init, capture_events): - """ - Mutate data in a `with configure_scope` block. - - Checks the results of SDK 2.x against the results the same code returned in SDK 1.x. 
- """ - sentry_init() - - events = capture_events() - - sentry_sdk.set_tag("A", 1) - sentry_sdk.capture_message("Event A") - - with sentry_sdk.configure_scope() as scope: # configure scope - sentry_sdk.set_tag("B1", 1) - scope.set_tag("B2", 1) - sentry_sdk.capture_message("Event B") - - sentry_sdk.set_tag("Z", 1) - sentry_sdk.capture_message("Event Z") - - (event_a, event_b, event_z) = events - - # Check against the results the same code returned in SDK 1.x - assert event_a["tags"] == {"A": 1} - assert event_b["tags"] == {"A": 1, "B1": 1, "B2": 1} - assert event_z["tags"] == {"A": 1, "B1": 1, "B2": 1, "Z": 1} - - -def test_push_scope_sdk1(sentry_init, capture_events): - """ - Mutate data in a `with push_scope` block - - Checks the results of SDK 2.x against the results the same code returned in SDK 1.x. - """ - sentry_init() - - events = capture_events() - - sentry_sdk.set_tag("A", 1) - sentry_sdk.capture_message("Event A") - - with sentry_sdk.push_scope() as scope: # push scope - sentry_sdk.set_tag("B1", 1) - scope.set_tag("B2", 1) - sentry_sdk.capture_message("Event B") - - sentry_sdk.set_tag("Z", 1) - sentry_sdk.capture_message("Event Z") - - (event_a, event_b, event_z) = events - - # Check against the results the same code returned in SDK 1.x - assert event_a["tags"] == {"A": 1} - assert event_b["tags"] == {"A": 1, "B1": 1, "B2": 1} - assert event_z["tags"] == {"A": 1, "Z": 1} - - -def test_with_hub_sdk1(sentry_init, capture_events): - """ - Mutate data in a `with Hub:` block - - Checks the results of SDK 2.x against the results the same code returned in SDK 1.x. - """ - sentry_init() - - events = capture_events() - - sentry_sdk.set_tag("A", 1) - sentry_sdk.capture_message("Event A") - - with Hub.current as hub: # with hub - sentry_sdk.set_tag("B1", 1) - hub.scope.set_tag("B2", 1) - sentry_sdk.capture_message("Event B") - - sentry_sdk.set_tag("Z", 1) - sentry_sdk.capture_message("Event Z") - - (event_a, event_b, event_z) = events - - # Check against the results the same code returned in SDK 1.x - assert event_a["tags"] == {"A": 1} - assert event_b["tags"] == {"A": 1, "B1": 1, "B2": 1} - assert event_z["tags"] == {"A": 1, "B1": 1, "B2": 1, "Z": 1} - - -def test_with_hub_configure_scope_sdk1(sentry_init, capture_events): - """ - Mutate data in a `with Hub:` containing a `with configure_scope` block - - Checks the results of SDK 2.x against the results the same code returned in SDK 1.x. 
- """ - sentry_init() - - events = capture_events() - - sentry_sdk.set_tag("A", 1) - sentry_sdk.capture_message("Event A") - - with Hub.current as hub: # with hub - sentry_sdk.set_tag("B1", 1) - with hub.configure_scope() as scope: # configure scope - sentry_sdk.set_tag("B2", 1) - hub.scope.set_tag("B3", 1) - scope.set_tag("B4", 1) - sentry_sdk.capture_message("Event B") - sentry_sdk.set_tag("B5", 1) - sentry_sdk.capture_message("Event C") - - sentry_sdk.set_tag("Z", 1) - sentry_sdk.capture_message("Event Z") - - (event_a, event_b, event_c, event_z) = events - - # Check against the results the same code returned in SDK 1.x - assert event_a["tags"] == {"A": 1} - assert event_b["tags"] == {"A": 1, "B1": 1, "B2": 1, "B3": 1, "B4": 1} - assert event_c["tags"] == {"A": 1, "B1": 1, "B2": 1, "B3": 1, "B4": 1, "B5": 1} - assert event_z["tags"] == { - "A": 1, - "B1": 1, - "B2": 1, - "B3": 1, - "B4": 1, - "B5": 1, - "Z": 1, - } - - -def test_with_hub_push_scope_sdk1(sentry_init, capture_events): - """ - Mutate data in a `with Hub:` containing a `with push_scope` block - - Checks the results of SDK 2.x against the results the same code returned in SDK 1.x. - """ - sentry_init() - - events = capture_events() - - sentry_sdk.set_tag("A", 1) - sentry_sdk.capture_message("Event A") - - with Hub.current as hub: # with hub - sentry_sdk.set_tag("B1", 1) - with hub.push_scope() as scope: # push scope - sentry_sdk.set_tag("B2", 1) - hub.scope.set_tag("B3", 1) - scope.set_tag("B4", 1) - sentry_sdk.capture_message("Event B") - sentry_sdk.set_tag("B5", 1) - sentry_sdk.capture_message("Event C") - - sentry_sdk.set_tag("Z", 1) - sentry_sdk.capture_message("Event Z") - - (event_a, event_b, event_c, event_z) = events - - # Check against the results the same code returned in SDK 1.x - assert event_a["tags"] == {"A": 1} - assert event_b["tags"] == {"A": 1, "B1": 1, "B2": 1, "B3": 1, "B4": 1} - assert event_c["tags"] == {"A": 1, "B1": 1, "B5": 1} - assert event_z["tags"] == {"A": 1, "B1": 1, "B5": 1, "Z": 1} - - -def test_with_cloned_hub_sdk1(sentry_init, capture_events): - """ - Mutate data in a `with cloned Hub:` block - - Checks the results of SDK 2.x against the results the same code returned in SDK 1.x. - """ - sentry_init() - - events = capture_events() - - sentry_sdk.set_tag("A", 1) - sentry_sdk.capture_message("Event A") - - with Hub(Hub.current) as hub: # clone hub - sentry_sdk.set_tag("B1", 1) - hub.scope.set_tag("B2", 1) - sentry_sdk.capture_message("Event B") - - sentry_sdk.set_tag("Z", 1) - sentry_sdk.capture_message("Event Z") - - (event_a, event_b, event_z) = events - - # Check against the results the same code returned in SDK 1.x - assert event_a["tags"] == {"A": 1} - assert event_b["tags"] == {"A": 1, "B1": 1, "B2": 1} - assert event_z["tags"] == {"A": 1, "Z": 1} - - -def test_with_cloned_hub_configure_scope_sdk1(sentry_init, capture_events): - """ - Mutate data in a `with cloned Hub:` containing a `with configure_scope` block - - Checks the results of SDK 2.x against the results the same code returned in SDK 1.x. 
- """ - sentry_init() - - events = capture_events() - - sentry_sdk.set_tag("A", 1) - sentry_sdk.capture_message("Event A") - - with Hub(Hub.current) as hub: # clone hub - sentry_sdk.set_tag("B1", 1) - with hub.configure_scope() as scope: # configure scope - sentry_sdk.set_tag("B2", 1) - hub.scope.set_tag("B3", 1) - scope.set_tag("B4", 1) - sentry_sdk.capture_message("Event B") - sentry_sdk.set_tag("B5", 1) - sentry_sdk.capture_message("Event C") - - sentry_sdk.set_tag("Z", 1) - sentry_sdk.capture_message("Event Z") - - (event_a, event_b, event_c, event_z) = events - - # Check against the results the same code returned in SDK 1.x - assert event_a["tags"] == {"A": 1} - assert event_b["tags"] == {"A": 1, "B1": 1, "B2": 1, "B3": 1, "B4": 1} - assert event_c["tags"] == {"A": 1, "B1": 1, "B2": 1, "B3": 1, "B4": 1, "B5": 1} - assert event_z["tags"] == {"A": 1, "Z": 1} - - -def test_with_cloned_hub_push_scope_sdk1(sentry_init, capture_events): - """ - Mutate data in a `with cloned Hub:` containing a `with push_scope` block - - Checks the results of SDK 2.x against the results the same code returned in SDK 1.x. - """ - sentry_init() - - events = capture_events() - - sentry_sdk.set_tag("A", 1) - sentry_sdk.capture_message("Event A") - - with Hub(Hub.current) as hub: # clone hub - sentry_sdk.set_tag("B1", 1) - with hub.push_scope() as scope: # push scope - sentry_sdk.set_tag("B2", 1) - hub.scope.set_tag("B3", 1) - scope.set_tag("B4", 1) - sentry_sdk.capture_message("Event B") - sentry_sdk.set_tag("B5", 1) - sentry_sdk.capture_message("Event C") - - sentry_sdk.set_tag("Z", 1) - sentry_sdk.capture_message("Event Z") - - (event_a, event_b, event_c, event_z) = events - - # Check against the results the same code returned in SDK 1.x - assert event_a["tags"] == {"A": 1} - assert event_b["tags"] == {"A": 1, "B1": 1, "B2": 1, "B3": 1, "B4": 1} - assert event_c["tags"] == {"A": 1, "B1": 1, "B5": 1} - assert event_z["tags"] == {"A": 1, "Z": 1} diff --git a/tests/new_scopes_compat/test_new_scopes_compat_event.py b/tests/new_scopes_compat/test_new_scopes_compat_event.py deleted file mode 100644 index db1e5fec4b..0000000000 --- a/tests/new_scopes_compat/test_new_scopes_compat_event.py +++ /dev/null @@ -1,503 +0,0 @@ -import pytest - -from unittest import mock - -import sentry_sdk -from sentry_sdk.hub import Hub -from sentry_sdk.integrations import iter_default_integrations -from sentry_sdk.scrubber import EventScrubber, DEFAULT_DENYLIST - - -""" -Those tests are meant to check the compatibility of the new scopes in SDK 2.0 with the old Hub/Scope system in SDK 1.x. - -Those tests have been run with the latest SDK 1.x version and the data used in the `assert` statements represents -the behvaior of the SDK 1.x. - -This makes sure that we are backwards compatible. 
(on a best effort basis, there will probably be some edge cases that are not covered here) -""" - - -@pytest.fixture -def integrations(): - return [ - integration.identifier - for integration in iter_default_integrations( - with_auto_enabling_integrations=False - ) - ] - - -@pytest.fixture -def expected_error(integrations): - def create_expected_error_event(trx, span): - return { - "level": "warning-X", - "exception": { - "values": [ - { - "mechanism": {"type": "generic", "handled": True}, - "module": None, - "type": "ValueError", - "value": "This is a test exception", - "stacktrace": { - "frames": [ - { - "filename": "tests/new_scopes_compat/test_new_scopes_compat_event.py", - "abs_path": mock.ANY, - "function": "_faulty_function", - "module": "tests.new_scopes_compat.test_new_scopes_compat_event", - "lineno": mock.ANY, - "pre_context": [ - " return create_expected_transaction_event", - "", - "", - "def _faulty_function():", - " try:", - ], - "context_line": ' raise ValueError("This is a test exception")', - "post_context": [ - " except ValueError as ex:", - " sentry_sdk.capture_exception(ex)", - "", - "", - "def _test_before_send(event, hint):", - ], - "vars": { - "ex": mock.ANY, - }, - "in_app": True, - } - ] - }, - } - ] - }, - "event_id": mock.ANY, - "timestamp": mock.ANY, - "contexts": { - "character": { - "name": "Mighty Fighter changed by before_send", - "age": 19, - "attack_type": "melee", - }, - "trace": { - "trace_id": trx.trace_id, - "span_id": span.span_id, - "parent_span_id": span.parent_span_id, - "op": "test_span", - "origin": "manual", - "description": None, - "data": { - "thread.id": mock.ANY, - "thread.name": "MainThread", - }, - }, - "runtime": { - "name": "CPython", - "version": mock.ANY, - "build": mock.ANY, - }, - }, - "user": { - "id": "123", - "email": "jane.doe@example.com", - "ip_address": "[Filtered]", - }, - "transaction": "test_transaction", - "transaction_info": {"source": "custom"}, - "tags": {"tag1": "tag1_value", "tag2": "tag2_value"}, - "extra": { - "extra1": "extra1_value", - "extra2": "extra2_value", - "should_be_removed_by_event_scrubber": "[Filtered]", - "sys.argv": "[Filtered]", - }, - "breadcrumbs": { - "values": [ - { - "category": "error-level", - "message": "Authenticated user %s", - "level": "error", - "data": {"breadcrumb2": "somedata"}, - "timestamp": mock.ANY, - "type": "default", - } - ] - }, - "modules": mock.ANY, - "release": "0.1.2rc3", - "environment": "checking-compatibility-with-sdk1", - "server_name": mock.ANY, - "sdk": { - "name": "sentry.python", - "version": mock.ANY, - "packages": [{"name": "pypi:sentry-sdk", "version": mock.ANY}], - "integrations": integrations, - }, - "platform": "python", - "_meta": { - "user": {"ip_address": {"": {"rem": [["!config", "s"]]}}}, - "extra": { - "should_be_removed_by_event_scrubber": { - "": {"rem": [["!config", "s"]]} - }, - "sys.argv": {"": {"rem": [["!config", "s"]]}}, - }, - }, - } - - return create_expected_error_event - - -@pytest.fixture -def expected_transaction(integrations): - def create_expected_transaction_event(trx, span): - return { - "type": "transaction", - "transaction": "test_transaction changed by before_send_transaction", - "transaction_info": {"source": "custom"}, - "contexts": { - "trace": { - "trace_id": trx.trace_id, - "span_id": trx.span_id, - "parent_span_id": None, - "op": "test_transaction_op", - "origin": "manual", - "description": None, - "data": { - "thread.id": mock.ANY, - "thread.name": "MainThread", - }, - }, - "character": { - "name": "Mighty Fighter changed by 
before_send_transaction", - "age": 19, - "attack_type": "melee", - }, - "runtime": { - "name": "CPython", - "version": mock.ANY, - "build": mock.ANY, - }, - }, - "tags": {"tag1": "tag1_value", "tag2": "tag2_value"}, - "timestamp": mock.ANY, - "start_timestamp": mock.ANY, - "spans": [ - { - "data": { - "thread.id": mock.ANY, - "thread.name": "MainThread", - }, - "trace_id": trx.trace_id, - "span_id": span.span_id, - "parent_span_id": span.parent_span_id, - "same_process_as_parent": True, - "op": "test_span", - "origin": "manual", - "description": None, - "start_timestamp": mock.ANY, - "timestamp": mock.ANY, - } - ], - "measurements": {"memory_used": {"value": 456, "unit": "byte"}}, - "event_id": mock.ANY, - "level": "warning-X", - "user": { - "id": "123", - "email": "jane.doe@example.com", - "ip_address": "[Filtered]", - }, - "extra": { - "extra1": "extra1_value", - "extra2": "extra2_value", - "should_be_removed_by_event_scrubber": "[Filtered]", - "sys.argv": "[Filtered]", - }, - "release": "0.1.2rc3", - "environment": "checking-compatibility-with-sdk1", - "server_name": mock.ANY, - "sdk": { - "name": "sentry.python", - "version": mock.ANY, - "packages": [{"name": "pypi:sentry-sdk", "version": mock.ANY}], - "integrations": integrations, - }, - "platform": "python", - "_meta": { - "user": {"ip_address": {"": {"rem": [["!config", "s"]]}}}, - "extra": { - "should_be_removed_by_event_scrubber": { - "": {"rem": [["!config", "s"]]} - }, - "sys.argv": {"": {"rem": [["!config", "s"]]}}, - }, - }, - } - - return create_expected_transaction_event - - -def _faulty_function(): - try: - raise ValueError("This is a test exception") - except ValueError as ex: - sentry_sdk.capture_exception(ex) - - -def _test_before_send(event, hint): - event["contexts"]["character"]["name"] += " changed by before_send" - return event - - -def _test_before_send_transaction(event, hint): - event["transaction"] += " changed by before_send_transaction" - event["contexts"]["character"]["name"] += " changed by before_send_transaction" - return event - - -def _test_before_breadcrumb(breadcrumb, hint): - if breadcrumb["category"] == "info-level": - return None - return breadcrumb - - -def _generate_event_data(scope=None): - """ - Generates some data to be used in the events sent by the tests. 
- """ - sentry_sdk.set_level("warning-X") - - sentry_sdk.add_breadcrumb( - category="info-level", - message="Authenticated user %s", - level="info", - data={"breadcrumb1": "somedata"}, - ) - sentry_sdk.add_breadcrumb( - category="error-level", - message="Authenticated user %s", - level="error", - data={"breadcrumb2": "somedata"}, - ) - - sentry_sdk.set_context( - "character", - { - "name": "Mighty Fighter", - "age": 19, - "attack_type": "melee", - }, - ) - - sentry_sdk.set_extra("extra1", "extra1_value") - sentry_sdk.set_extra("extra2", "extra2_value") - sentry_sdk.set_extra("should_be_removed_by_event_scrubber", "XXX") - - sentry_sdk.set_tag("tag1", "tag1_value") - sentry_sdk.set_tag("tag2", "tag2_value") - - sentry_sdk.set_user( - {"id": "123", "email": "jane.doe@example.com", "ip_address": "211.161.1.124"} - ) - - sentry_sdk.set_measurement("memory_used", 456, "byte") - - if scope is not None: - scope.add_attachment(bytes=b"Hello World", filename="hello.txt") - - -def _init_sentry_sdk(sentry_init): - sentry_init( - environment="checking-compatibility-with-sdk1", - release="0.1.2rc3", - before_send=_test_before_send, - before_send_transaction=_test_before_send_transaction, - before_breadcrumb=_test_before_breadcrumb, - event_scrubber=EventScrubber( - denylist=DEFAULT_DENYLIST - + ["should_be_removed_by_event_scrubber", "sys.argv"] - ), - send_default_pii=False, - traces_sample_rate=1.0, - auto_enabling_integrations=False, - ) - - -# -# The actual Tests start here! -# - - -def test_event(sentry_init, capture_envelopes, expected_error, expected_transaction): - _init_sentry_sdk(sentry_init) - - envelopes = capture_envelopes() - - with sentry_sdk.start_transaction( - name="test_transaction", op="test_transaction_op" - ) as trx: - with sentry_sdk.start_span(op="test_span") as span: - with sentry_sdk.configure_scope() as scope: # configure scope - _generate_event_data(scope) - _faulty_function() - - (error_envelope, transaction_envelope) = envelopes - - error = error_envelope.get_event() - transaction = transaction_envelope.get_transaction_event() - attachment = error_envelope.items[-1] - - assert error == expected_error(trx, span) - assert transaction == expected_transaction(trx, span) - assert attachment.headers == { - "filename": "hello.txt", - "type": "attachment", - "content_type": "text/plain", - } - assert attachment.payload.bytes == b"Hello World" - - -def test_event2(sentry_init, capture_envelopes, expected_error, expected_transaction): - _init_sentry_sdk(sentry_init) - - envelopes = capture_envelopes() - - with Hub(Hub.current): - sentry_sdk.set_tag("A", 1) # will not be added - - with Hub.current: # with hub - with sentry_sdk.push_scope() as scope: - scope.set_tag("B", 1) # will not be added - - with sentry_sdk.start_transaction( - name="test_transaction", op="test_transaction_op" - ) as trx: - with sentry_sdk.start_span(op="test_span") as span: - with sentry_sdk.configure_scope() as scope: # configure scope - _generate_event_data(scope) - _faulty_function() - - (error_envelope, transaction_envelope) = envelopes - - error = error_envelope.get_event() - transaction = transaction_envelope.get_transaction_event() - attachment = error_envelope.items[-1] - - assert error == expected_error(trx, span) - assert transaction == expected_transaction(trx, span) - assert attachment.headers == { - "filename": "hello.txt", - "type": "attachment", - "content_type": "text/plain", - } - assert attachment.payload.bytes == b"Hello World" - - -def test_event3(sentry_init, capture_envelopes, 
expected_error, expected_transaction): - _init_sentry_sdk(sentry_init) - - envelopes = capture_envelopes() - - with Hub(Hub.current): - sentry_sdk.set_tag("A", 1) # will not be added - - with Hub.current: # with hub - with sentry_sdk.push_scope() as scope: - scope.set_tag("B", 1) # will not be added - - with sentry_sdk.push_scope() as scope: # push scope - with sentry_sdk.start_transaction( - name="test_transaction", op="test_transaction_op" - ) as trx: - with sentry_sdk.start_span(op="test_span") as span: - _generate_event_data(scope) - _faulty_function() - - (error_envelope, transaction_envelope) = envelopes - - error = error_envelope.get_event() - transaction = transaction_envelope.get_transaction_event() - attachment = error_envelope.items[-1] - - assert error == expected_error(trx, span) - assert transaction == expected_transaction(trx, span) - assert attachment.headers == { - "filename": "hello.txt", - "type": "attachment", - "content_type": "text/plain", - } - assert attachment.payload.bytes == b"Hello World" - - -def test_event4(sentry_init, capture_envelopes, expected_error, expected_transaction): - _init_sentry_sdk(sentry_init) - - envelopes = capture_envelopes() - - with Hub(Hub.current): - sentry_sdk.set_tag("A", 1) # will not be added - - with Hub(Hub.current): # with hub clone - with sentry_sdk.push_scope() as scope: - scope.set_tag("B", 1) # will not be added - - with sentry_sdk.start_transaction( - name="test_transaction", op="test_transaction_op" - ) as trx: - with sentry_sdk.start_span(op="test_span") as span: - with sentry_sdk.configure_scope() as scope: # configure scope - _generate_event_data(scope) - _faulty_function() - - (error_envelope, transaction_envelope) = envelopes - - error = error_envelope.get_event() - transaction = transaction_envelope.get_transaction_event() - attachment = error_envelope.items[-1] - - assert error == expected_error(trx, span) - assert transaction == expected_transaction(trx, span) - assert attachment.headers == { - "filename": "hello.txt", - "type": "attachment", - "content_type": "text/plain", - } - assert attachment.payload.bytes == b"Hello World" - - -def test_event5(sentry_init, capture_envelopes, expected_error, expected_transaction): - _init_sentry_sdk(sentry_init) - - envelopes = capture_envelopes() - - with Hub(Hub.current): - sentry_sdk.set_tag("A", 1) # will not be added - - with Hub(Hub.current): # with hub clone - with sentry_sdk.push_scope() as scope: - scope.set_tag("B", 1) # will not be added - - with sentry_sdk.push_scope() as scope: # push scope - with sentry_sdk.start_transaction( - name="test_transaction", op="test_transaction_op" - ) as trx: - with sentry_sdk.start_span(op="test_span") as span: - _generate_event_data(scope) - _faulty_function() - - (error_envelope, transaction_envelope) = envelopes - - error = error_envelope.get_event() - transaction = transaction_envelope.get_transaction_event() - attachment = error_envelope.items[-1] - - assert error == expected_error(trx, span) - assert transaction == expected_transaction(trx, span) - assert attachment.headers == { - "filename": "hello.txt", - "type": "attachment", - "content_type": "text/plain", - } - assert attachment.payload.bytes == b"Hello World" diff --git a/tests/integrations/opentelemetry/__init__.py b/tests/opentelemetry/__init__.py similarity index 100% rename from tests/integrations/opentelemetry/__init__.py rename to tests/opentelemetry/__init__.py diff --git a/tests/opentelemetry/test_compat.py b/tests/opentelemetry/test_compat.py new file mode 100644 
index 0000000000..381d9ad22e --- /dev/null +++ b/tests/opentelemetry/test_compat.py @@ -0,0 +1,99 @@ +import sentry_sdk +from sentry_sdk.tracing import Transaction + + +def test_transaction_name_span_description_compat( + sentry_init, + capture_events, +): + sentry_init(traces_sample_rate=1.0) + + events = capture_events() + + with sentry_sdk.start_span( + name="trx-name", + op="trx-op", + ) as trx: + with sentry_sdk.start_span( + description="span-desc", + op="span-op", + ) as spn: + ... + + assert trx.__class__.__name__ == "Span" + assert trx.op == "trx-op" + assert trx.name == "trx-name" + assert trx.description is None + + assert trx._otel_span is not None + assert trx._otel_span.name == "trx-name" + assert trx._otel_span.attributes["sentry.op"] == "trx-op" + assert trx._otel_span.attributes["sentry.name"] == "trx-name" + assert "sentry.description" not in trx._otel_span.attributes + + assert spn.__class__.__name__ == "Span" + assert spn.op == "span-op" + assert spn.description == "span-desc" + assert spn.name == "span-desc" + + assert spn._otel_span is not None + assert spn._otel_span.name == "span-desc" + assert spn._otel_span.attributes["sentry.op"] == "span-op" + assert spn._otel_span.attributes["sentry.description"] == "span-desc" + + transaction = events[0] + assert transaction["transaction"] == "trx-name" + assert transaction["contexts"]["trace"]["op"] == "trx-op" + assert transaction["contexts"]["trace"]["data"]["sentry.op"] == "trx-op" + assert transaction["contexts"]["trace"]["data"]["sentry.name"] == "trx-name" + assert "sentry.description" not in transaction["contexts"]["trace"]["data"] + + span = transaction["spans"][0] + assert span["description"] == "span-desc" + assert span["op"] == "span-op" + assert span["data"]["sentry.op"] == "span-op" + assert span["data"]["sentry.description"] == "span-desc" + + +def test_start_transaction_compat( + sentry_init, + capture_events, +): + sentry_init(traces_sample_rate=1.0) + + events = capture_events() + + with sentry_sdk.start_transaction( + name="trx-name", + op="trx-op", + ): + ... 
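+    # NOTE: start_transaction() is exercised here purely for backwards
+    # compatibility; the assertions below inspect only the emitted event,
+    # so they should hold regardless of whether the root span came from
+    # start_transaction() or start_span().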
+ + transaction = events[0] + assert transaction["transaction"] == "trx-name" + assert transaction["contexts"]["trace"]["op"] == "trx-op" + assert transaction["contexts"]["trace"]["data"]["sentry.op"] == "trx-op" + assert transaction["contexts"]["trace"]["data"]["sentry.name"] == "trx-name" + assert "sentry.description" not in transaction["contexts"]["trace"]["data"] + + +def test_start_transaction_with_explicit_transaction_compat( + sentry_init, + capture_events, +): + """It should still be possible to provide a ready-made Transaction to start_transaction.""" + sentry_init(traces_sample_rate=1.0) + + events = capture_events() + + transaction = Transaction(name="trx-name", op="trx-op") + + with sentry_sdk.start_transaction(transaction=transaction): + pass + + transaction = events[0] + assert transaction["transaction"] == "trx-name" + assert transaction["contexts"]["trace"]["op"] == "trx-op" + assert transaction["contexts"]["trace"]["data"]["sentry.op"] == "trx-op" + assert transaction["contexts"]["trace"]["data"]["sentry.name"] == "trx-name" + assert "sentry.description" not in transaction["contexts"]["trace"]["data"] diff --git a/tests/opentelemetry/test_context_scope_management.py b/tests/opentelemetry/test_context_scope_management.py new file mode 100644 index 0000000000..a4872ef858 --- /dev/null +++ b/tests/opentelemetry/test_context_scope_management.py @@ -0,0 +1,24 @@ +from opentelemetry import trace + +import sentry_sdk +from sentry_sdk.tracing import Span + + +tracer = trace.get_tracer(__name__) + + +def test_scope_span_reference_started_with_sentry(sentry_init): + sentry_init(traces_sample_rate=1.0) + + with sentry_sdk.start_span(name="test") as span: + assert sentry_sdk.get_current_span() == span + assert sentry_sdk.get_current_scope().span == span + + +def test_scope_span_reference_started_with_otel(sentry_init): + sentry_init(traces_sample_rate=1.0) + + with tracer.start_as_current_span("test") as otel_span: + wrapped_span = Span(otel_span=otel_span) + assert sentry_sdk.get_current_span() == wrapped_span + assert sentry_sdk.get_current_scope().span == wrapped_span diff --git a/tests/integrations/opentelemetry/test_entry_points.py b/tests/opentelemetry/test_entry_points.py similarity index 87% rename from tests/integrations/opentelemetry/test_entry_points.py rename to tests/opentelemetry/test_entry_points.py index cd78209432..efadb67a06 100644 --- a/tests/integrations/opentelemetry/test_entry_points.py +++ b/tests/opentelemetry/test_entry_points.py @@ -3,7 +3,7 @@ from unittest.mock import patch from opentelemetry import propagate -from sentry_sdk.integrations.opentelemetry import SentryPropagator +from sentry_sdk.opentelemetry import SentryPropagator def test_propagator_loaded_if_mentioned_in_environment_variable(): diff --git a/tests/opentelemetry/test_potel.py b/tests/opentelemetry/test_potel.py new file mode 100644 index 0000000000..e99b530a71 --- /dev/null +++ b/tests/opentelemetry/test_potel.py @@ -0,0 +1,369 @@ +import pytest +from opentelemetry import trace + +import sentry_sdk +from sentry_sdk.consts import SPANSTATUS, VERSION +from tests.conftest import ApproxDict + + +tracer = trace.get_tracer(__name__) + + +@pytest.fixture(autouse=True) +def sentry_init_potel(sentry_init): + sentry_init(traces_sample_rate=1.0) + + +def test_root_span_transaction_payload_started_with_otel_only(capture_envelopes): + envelopes = capture_envelopes() + + with tracer.start_as_current_span("request"): + pass + + (envelope,) = envelopes + # TODO-neel-potel DSC header + (item,) = 
envelope.items + payload = item.payload.json + + assert payload["type"] == "transaction" + assert payload["transaction"] == "request" + assert payload["transaction_info"] == {"source": "custom"} + assert payload["timestamp"] is not None + assert payload["start_timestamp"] is not None + + contexts = payload["contexts"] + assert "runtime" in contexts + assert "otel" in contexts + assert "resource" in contexts["otel"] + + trace_context = contexts["trace"] + assert "trace_id" in trace_context + assert "span_id" in trace_context + assert trace_context["origin"] == "manual" + + assert payload["spans"] == [] + + +def test_child_span_payload_started_with_otel_only(capture_envelopes): + envelopes = capture_envelopes() + + with tracer.start_as_current_span("request"): + with tracer.start_as_current_span("db"): + pass + + (envelope,) = envelopes + (item,) = envelope.items + payload = item.payload.json + (span,) = payload["spans"] + + assert span["description"] == "db" + assert span["origin"] == "manual" + assert span["span_id"] is not None + assert span["trace_id"] == payload["contexts"]["trace"]["trace_id"] + assert span["parent_span_id"] == payload["contexts"]["trace"]["span_id"] + assert span["timestamp"] is not None + assert span["start_timestamp"] is not None + + +def test_children_span_nesting_started_with_otel_only(capture_envelopes): + envelopes = capture_envelopes() + + with tracer.start_as_current_span("request"): + with tracer.start_as_current_span("db"): + with tracer.start_as_current_span("redis"): + pass + with tracer.start_as_current_span("http"): + pass + + (envelope,) = envelopes + (item,) = envelope.items + payload = item.payload.json + (db_span, http_span, redis_span) = payload["spans"] + + assert db_span["description"] == "db" + assert redis_span["description"] == "redis" + assert http_span["description"] == "http" + + assert db_span["trace_id"] == payload["contexts"]["trace"]["trace_id"] + assert redis_span["trace_id"] == payload["contexts"]["trace"]["trace_id"] + assert http_span["trace_id"] == payload["contexts"]["trace"]["trace_id"] + + assert db_span["parent_span_id"] == payload["contexts"]["trace"]["span_id"] + assert http_span["parent_span_id"] == payload["contexts"]["trace"]["span_id"] + assert redis_span["parent_span_id"] == db_span["span_id"] + + +def test_root_span_transaction_payload_started_with_sentry_only(capture_envelopes): + envelopes = capture_envelopes() + + with sentry_sdk.start_span(description="request"): + pass + + (envelope,) = envelopes + # TODO-neel-potel DSC header + (item,) = envelope.items + payload = item.payload.json + + assert payload["type"] == "transaction" + assert payload["transaction"] == "request" + assert payload["transaction_info"] == {"source": "custom"} + assert payload["timestamp"] is not None + assert payload["start_timestamp"] is not None + + contexts = payload["contexts"] + assert "runtime" in contexts + assert "otel" in contexts + assert "resource" in contexts["otel"] + + trace_context = contexts["trace"] + assert "trace_id" in trace_context + assert "span_id" in trace_context + assert trace_context["origin"] == "manual" + assert trace_context["status"] == "ok" + + assert payload["spans"] == [] + + +def test_child_span_payload_started_with_sentry_only(capture_envelopes): + envelopes = capture_envelopes() + + with sentry_sdk.start_span(description="request"): + with sentry_sdk.start_span(description="db"): + pass + + (envelope,) = envelopes + (item,) = envelope.items + payload = item.payload.json + (span,) = payload["spans"] + + assert 
span["description"] == "db" + assert span["origin"] == "manual" + assert span["status"] == "ok" + assert span["span_id"] is not None + assert span["trace_id"] == payload["contexts"]["trace"]["trace_id"] + assert span["parent_span_id"] == payload["contexts"]["trace"]["span_id"] + assert span["timestamp"] is not None + assert span["start_timestamp"] is not None + + +def test_children_span_nesting_started_with_sentry_only(capture_envelopes): + envelopes = capture_envelopes() + + with sentry_sdk.start_span(description="request"): + with sentry_sdk.start_span(description="db"): + with sentry_sdk.start_span(description="redis"): + pass + with sentry_sdk.start_span(description="http"): + pass + + (envelope,) = envelopes + (item,) = envelope.items + payload = item.payload.json + (db_span, http_span, redis_span) = payload["spans"] + + assert db_span["description"] == "db" + assert redis_span["description"] == "redis" + assert http_span["description"] == "http" + + assert db_span["trace_id"] == payload["contexts"]["trace"]["trace_id"] + assert redis_span["trace_id"] == payload["contexts"]["trace"]["trace_id"] + assert http_span["trace_id"] == payload["contexts"]["trace"]["trace_id"] + + assert db_span["parent_span_id"] == payload["contexts"]["trace"]["span_id"] + assert http_span["parent_span_id"] == payload["contexts"]["trace"]["span_id"] + assert redis_span["parent_span_id"] == db_span["span_id"] + + +def test_children_span_nesting_mixed(capture_envelopes): + envelopes = capture_envelopes() + + with sentry_sdk.start_span(description="request"): + with tracer.start_as_current_span("db"): + with sentry_sdk.start_span(description="redis"): + pass + with tracer.start_as_current_span("http"): + pass + + (envelope,) = envelopes + (item,) = envelope.items + payload = item.payload.json + (db_span, http_span, redis_span) = payload["spans"] + + assert db_span["description"] == "db" + assert redis_span["description"] == "redis" + assert http_span["description"] == "http" + + assert db_span["trace_id"] == payload["contexts"]["trace"]["trace_id"] + assert redis_span["trace_id"] == payload["contexts"]["trace"]["trace_id"] + assert http_span["trace_id"] == payload["contexts"]["trace"]["trace_id"] + + assert db_span["parent_span_id"] == payload["contexts"]["trace"]["span_id"] + assert http_span["parent_span_id"] == payload["contexts"]["trace"]["span_id"] + assert redis_span["parent_span_id"] == db_span["span_id"] + + +def test_span_attributes_in_data_started_with_otel(capture_envelopes): + envelopes = capture_envelopes() + + with tracer.start_as_current_span("request") as request_span: + request_span.set_attributes({"foo": "bar", "baz": 42}) + with tracer.start_as_current_span("db") as db_span: + db_span.set_attributes({"abc": 99, "def": "moo"}) + + (envelope,) = envelopes + (item,) = envelope.items + payload = item.payload.json + + assert payload["contexts"]["trace"]["data"] == ApproxDict({"foo": "bar", "baz": 42}) + assert payload["spans"][0]["data"] == ApproxDict({"abc": 99, "def": "moo"}) + + +def test_span_data_started_with_sentry(capture_envelopes): + envelopes = capture_envelopes() + + with sentry_sdk.start_span(op="http", description="request") as request_span: + request_span.set_attribute("foo", "bar") + with sentry_sdk.start_span(op="db", description="statement") as db_span: + db_span.set_attribute("baz", 42) + + (envelope,) = envelopes + (item,) = envelope.items + payload = item.payload.json + + assert payload["contexts"]["trace"]["data"] == ApproxDict( + { + "foo": "bar", + "sentry.origin": "manual", 
+ "sentry.description": "request", + "sentry.op": "http", + } + ) + assert payload["spans"][0]["data"] == ApproxDict( + { + "baz": 42, + "sentry.origin": "manual", + "sentry.description": "statement", + "sentry.op": "db", + } + ) + + +def test_transaction_tags_started_with_otel(capture_envelopes): + envelopes = capture_envelopes() + + sentry_sdk.set_tag("tag.global", 99) + with tracer.start_as_current_span("request"): + sentry_sdk.set_tag("tag.inner", "foo") + + (envelope,) = envelopes + (item,) = envelope.items + payload = item.payload.json + + assert payload["tags"] == {"tag.global": "99", "tag.inner": "foo"} + + +def test_transaction_tags_started_with_sentry(capture_envelopes): + envelopes = capture_envelopes() + + sentry_sdk.set_tag("tag.global", 99) + with sentry_sdk.start_span(description="request"): + sentry_sdk.set_tag("tag.inner", "foo") + + (envelope,) = envelopes + (item,) = envelope.items + payload = item.payload.json + + assert payload["tags"] == {"tag.global": "99", "tag.inner": "foo"} + + +def test_multiple_transaction_tags_isolation_scope_started_with_otel(capture_envelopes): + envelopes = capture_envelopes() + + sentry_sdk.set_tag("tag.global", 99) + with sentry_sdk.isolation_scope(): + with tracer.start_as_current_span("request a"): + sentry_sdk.set_tag("tag.inner.a", "a") + with sentry_sdk.isolation_scope(): + with tracer.start_as_current_span("request b"): + sentry_sdk.set_tag("tag.inner.b", "b") + + (payload_a, payload_b) = [envelope.items[0].payload.json for envelope in envelopes] + + assert payload_a["tags"] == {"tag.global": "99", "tag.inner.a": "a"} + assert payload_b["tags"] == {"tag.global": "99", "tag.inner.b": "b"} + + +def test_multiple_transaction_tags_isolation_scope_started_with_sentry( + capture_envelopes, +): + envelopes = capture_envelopes() + + sentry_sdk.set_tag("tag.global", 99) + with sentry_sdk.isolation_scope(): + with sentry_sdk.start_span(description="request a"): + sentry_sdk.set_tag("tag.inner.a", "a") + with sentry_sdk.isolation_scope(): + with sentry_sdk.start_span(description="request b"): + sentry_sdk.set_tag("tag.inner.b", "b") + + (payload_a, payload_b) = [envelope.items[0].payload.json for envelope in envelopes] + + assert payload_a["tags"] == {"tag.global": "99", "tag.inner.a": "a"} + assert payload_b["tags"] == {"tag.global": "99", "tag.inner.b": "b"} + + +def test_potel_span_root_span_references(): + with sentry_sdk.start_span(description="request") as request_span: + assert request_span.is_root_span + assert request_span.root_span == request_span + with sentry_sdk.start_span(description="db") as db_span: + assert not db_span.is_root_span + assert db_span.root_span == request_span + with sentry_sdk.start_span(description="redis") as redis_span: + assert not redis_span.is_root_span + assert redis_span.root_span == request_span + with sentry_sdk.start_span(description="http") as http_span: + assert not http_span.is_root_span + assert http_span.root_span == request_span + + +@pytest.mark.parametrize( + "status_in,status_out", + [ + (None, None), + ("", SPANSTATUS.UNKNOWN_ERROR), + (SPANSTATUS.OK, SPANSTATUS.OK), + (SPANSTATUS.ABORTED, SPANSTATUS.UNKNOWN_ERROR), + (SPANSTATUS.ALREADY_EXISTS, SPANSTATUS.UNKNOWN_ERROR), + (SPANSTATUS.CANCELLED, SPANSTATUS.UNKNOWN_ERROR), + (SPANSTATUS.DATA_LOSS, SPANSTATUS.UNKNOWN_ERROR), + (SPANSTATUS.DEADLINE_EXCEEDED, SPANSTATUS.UNKNOWN_ERROR), + (SPANSTATUS.FAILED_PRECONDITION, SPANSTATUS.UNKNOWN_ERROR), + (SPANSTATUS.INTERNAL_ERROR, SPANSTATUS.UNKNOWN_ERROR), + (SPANSTATUS.INVALID_ARGUMENT, 
SPANSTATUS.UNKNOWN_ERROR), + (SPANSTATUS.NOT_FOUND, SPANSTATUS.UNKNOWN_ERROR), + (SPANSTATUS.OUT_OF_RANGE, SPANSTATUS.UNKNOWN_ERROR), + (SPANSTATUS.PERMISSION_DENIED, SPANSTATUS.UNKNOWN_ERROR), + (SPANSTATUS.RESOURCE_EXHAUSTED, SPANSTATUS.UNKNOWN_ERROR), + (SPANSTATUS.UNAUTHENTICATED, SPANSTATUS.UNKNOWN_ERROR), + (SPANSTATUS.UNAVAILABLE, SPANSTATUS.UNKNOWN_ERROR), + (SPANSTATUS.UNIMPLEMENTED, SPANSTATUS.UNKNOWN_ERROR), + (SPANSTATUS.UNKNOWN_ERROR, SPANSTATUS.UNKNOWN_ERROR), + ], +) +def test_potel_span_status(status_in, status_out): + span = sentry_sdk.start_span(name="test") + if status_in is not None: + span.set_status(status_in) + + assert span.status == status_out + + +def test_otel_resource(sentry_init): + sentry_init() + + tracer_provider = trace.get_tracer_provider() + resource_attrs = tracer_provider.resource.attributes + assert resource_attrs["service.name"] == "sentry-python" + assert resource_attrs["service.namespace"] == "sentry" + assert resource_attrs["service.version"] == VERSION diff --git a/tests/opentelemetry/test_propagator.py b/tests/opentelemetry/test_propagator.py new file mode 100644 index 0000000000..2f802daafb --- /dev/null +++ b/tests/opentelemetry/test_propagator.py @@ -0,0 +1,276 @@ +from unittest.mock import MagicMock, patch + +import pytest + +from opentelemetry.trace.propagation import get_current_span +from opentelemetry.propagators.textmap import DefaultSetter +from opentelemetry.semconv.trace import SpanAttributes + +import sentry_sdk +from sentry_sdk.consts import MATCH_ALL +from sentry_sdk.opentelemetry.consts import ( + SENTRY_BAGGAGE_KEY, + SENTRY_TRACE_KEY, +) +from sentry_sdk.opentelemetry import SentryPropagator +from tests.conftest import SortedBaggage + + +@pytest.mark.forked +def test_extract_no_context_no_sentry_trace_header(): + """ + No context and NO Sentry trace data in getter. + Extract should return empty context. + """ + carrier = None + context = {} + getter = MagicMock() + getter.get.return_value = None + + modified_context = SentryPropagator().extract(carrier, context, getter) + + assert modified_context == {} + + +@pytest.mark.forked +def test_extract_context_no_sentry_trace_header(): + """ + Context but NO Sentry trace data in getter. + Extract should return context as is. + """ + carrier = None + context = {"some": "value"} + getter = MagicMock() + getter.get.return_value = None + + modified_context = SentryPropagator().extract(carrier, context, getter) + + assert modified_context == context + + +@pytest.mark.forked +def test_extract_empty_context_sentry_trace_header_no_baggage(): + """ + Empty context but Sentry trace data but NO Baggage in getter. + Extract should return context that has empty baggage in it and also a NoopSpan with span_id and trace_id. 
+ """ + carrier = None + context = {} + getter = MagicMock() + getter.get.side_effect = [ + ["1234567890abcdef1234567890abcdef-1234567890abcdef-1"], + None, + ] + + modified_context = SentryPropagator().extract(carrier, context, getter) + + assert len(modified_context.keys()) == 3 + + assert modified_context[SENTRY_TRACE_KEY] == { + "trace_id": "1234567890abcdef1234567890abcdef", + "parent_span_id": "1234567890abcdef", + "parent_sampled": True, + } + assert modified_context[SENTRY_BAGGAGE_KEY].serialize() == "" + + span_context = get_current_span(modified_context).get_span_context() + assert span_context.span_id == int("1234567890abcdef", 16) + assert span_context.trace_id == int("1234567890abcdef1234567890abcdef", 16) + + +@pytest.mark.forked +def test_extract_context_sentry_trace_header_baggage(): + """ + Empty context but Sentry trace data and Baggage in getter. + Extract should return context that has baggage in it and also a NoopSpan with span_id and trace_id. + """ + baggage_header = ( + "other-vendor-value-1=foo;bar;baz, sentry-trace_id=771a43a4192642f0b136d5159a501700, " + "sentry-public_key=49d0f7386ad645858ae85020e393bef3, sentry-sample_rate=0.01337, " + "sentry-user_id=Am%C3%A9lie, other-vendor-value-2=foo;bar;" + ) + + carrier = None + context = {"some": "value"} + getter = MagicMock() + getter.get.side_effect = [ + ["1234567890abcdef1234567890abcdef-1234567890abcdef-1"], + [baggage_header], + ] + + modified_context = SentryPropagator().extract(carrier, context, getter) + + assert len(modified_context.keys()) == 4 + + assert modified_context[SENTRY_TRACE_KEY] == { + "trace_id": "1234567890abcdef1234567890abcdef", + "parent_span_id": "1234567890abcdef", + "parent_sampled": True, + } + + assert modified_context[SENTRY_BAGGAGE_KEY].serialize() == ( + "sentry-trace_id=771a43a4192642f0b136d5159a501700," + "sentry-public_key=49d0f7386ad645858ae85020e393bef3," + "sentry-sample_rate=0.01337,sentry-user_id=Am%C3%A9lie" + ) + + span_context = get_current_span(modified_context).get_span_context() + assert span_context.span_id == int("1234567890abcdef", 16) + assert span_context.trace_id == int("1234567890abcdef1234567890abcdef", 16) + + +def test_inject_continue_trace(sentry_init): + sentry_init(traces_sample_rate=1.0) + + carrier = {} + setter = DefaultSetter() + + trace_id = "771a43a4192642f0b136d5159a501700" + sentry_trace = "771a43a4192642f0b136d5159a501700-1234567890abcdef-1" + baggage = ( + "sentry-trace_id=771a43a4192642f0b136d5159a501700," + "sentry-public_key=frontendpublickey," + "sentry-sample_rate=0.01337," + "sentry-sampled=true," + "sentry-release=myfrontend," + "sentry-environment=bird," + "sentry-transaction=bar" + ) + incoming_headers = { + "HTTP_SENTRY_TRACE": sentry_trace, + "HTTP_BAGGAGE": baggage, + } + + expected_baggage = baggage + ",sentry-sample_rand=0.001111" + + with patch( + "sentry_sdk.tracing_utils.Random.uniform", + return_value=0.001111, + ): + with sentry_sdk.continue_trace(incoming_headers): + with sentry_sdk.start_span(name="foo") as span: + SentryPropagator().inject(carrier, setter=setter) + assert carrier["sentry-trace"] == f"{trace_id}-{span.span_id}-1" + assert carrier["baggage"] == SortedBaggage(expected_baggage) + + +def test_inject_continue_trace_incoming_sample_rand(sentry_init): + sentry_init(traces_sample_rate=1.0) + + carrier = {} + setter = DefaultSetter() + + trace_id = "771a43a4192642f0b136d5159a501700" + sentry_trace = "771a43a4192642f0b136d5159a501700-1234567890abcdef-1" + baggage = ( + "sentry-trace_id=771a43a4192642f0b136d5159a501700," 
+ "sentry-public_key=frontendpublickey," + "sentry-sample_rate=0.01337," + "sentry-sampled=true," + "sentry-release=myfrontend," + "sentry-environment=bird," + "sentry-transaction=bar," + "sentry-sample_rand=0.002849" + ) + incoming_headers = { + "HTTP_SENTRY_TRACE": sentry_trace, + "HTTP_BAGGAGE": baggage, + } + + with sentry_sdk.continue_trace(incoming_headers): + with sentry_sdk.start_span(name="foo") as span: + SentryPropagator().inject(carrier, setter=setter) + assert carrier["sentry-trace"] == f"{trace_id}-{span.span_id}-1" + assert carrier["baggage"] == SortedBaggage(baggage) + + +def test_inject_head_sdk(sentry_init): + sentry_init(traces_sample_rate=1.0, release="release") + + carrier = {} + setter = DefaultSetter() + + expected_baggage = ( + "sentry-transaction=foo," + "sentry-release=release," + "sentry-environment=production," + "sentry-trace_id={trace_id}," + "sentry-sample_rate=1.0," + "sentry-sampled=true," + "sentry-sample_rand=0.111111" + ) + + with patch( + "sentry_sdk.tracing_utils.Random.uniform", + return_value=0.111111, + ): + with sentry_sdk.start_span(name="foo") as span: + SentryPropagator().inject(carrier, setter=setter) + assert carrier["sentry-trace"] == f"{span.trace_id}-{span.span_id}-1" + assert carrier["baggage"] == SortedBaggage( + expected_baggage.format(trace_id=span.trace_id) + ) + + +@pytest.mark.parametrize( + "trace_propagation_targets,url,trace_propagated", + [ + # No targets - should not propagate + ([], "https://example.com/api/users", False), + (None, "https://example.com/api/users", False), + # MATCH_ALL - should propagate + ([MATCH_ALL], "https://example.com/api/users", True), + # Exact match - should propagate + (["https://example.com"], "https://example.com/api/users", True), + (["https://example.com/"], "https://example.com/api/users", True), + # No match - should not propagate + (["https://example.com"], "https://other-domain.com/api/users", False), + (["https://example.com/"], "https://other-domain.com/api/users", False), + # Regex patterns + ( + ["https://example.com", r"https?:\/\/[\w\-]+(\.[\w\-]+)+\.net"], + "https://good.example.net/api", + True, + ), + ( + ["https://example.com", r"https?:\/\/[\w\-]+(\.[\w\-]+)+\.net"], + "https://example.net/api", + False, + ), + # HTTP vs HTTPS + (["https://example.com"], "http://example.com/api/users", False), + (["http://example.com"], "https://example.com/api/users", False), + # Path matching + (["https://example.com/api"], "https://example.com/api/users", True), + (["https://example.com/api"], "https://example.com/other/path", False), + ], +) +def test_propagator_trace_propagation_targets( + sentry_init, + trace_propagation_targets, + url, + trace_propagated, +): + """Test that the propagator respects trace_propagation_targets for HTTP spans.""" + sentry_init( + trace_propagation_targets=trace_propagation_targets, + traces_sample_rate=1.0, + ) + + carrier = {} + setter = DefaultSetter() + + # Create a real HTTP span with the test URL + with sentry_sdk.start_span(name="http.client") as span: + span.set_attribute(SpanAttributes.HTTP_METHOD, "GET") + span.set_attribute(SpanAttributes.HTTP_URL, url) + + # Test the propagator + SentryPropagator().inject(carrier, setter=setter) + + if trace_propagated: + assert "sentry-trace" in carrier + assert "baggage" in carrier + else: + assert "sentry-trace" not in carrier + assert "baggage" not in carrier diff --git a/tests/opentelemetry/test_sampler.py b/tests/opentelemetry/test_sampler.py new file mode 100644 index 0000000000..4ca1e1963f --- /dev/null +++ 
b/tests/opentelemetry/test_sampler.py @@ -0,0 +1,334 @@ +import pytest +from unittest import mock + +from opentelemetry import trace + +import sentry_sdk + + +USE_DEFAULT_TRACES_SAMPLE_RATE = -1 + +tracer = trace.get_tracer(__name__) + + +@pytest.mark.parametrize( + "traces_sample_rate, expected_num_of_envelopes", + [ + # special case for testing, do not pass any traces_sample_rate to init() (the default traces_sample_rate=None will be used) + (USE_DEFAULT_TRACES_SAMPLE_RATE, 0), + # traces_sample_rate=None means do not create new traces, and also do not continue incoming traces. So, no envelopes at all. + (None, 0), + # traces_sample_rate=0 means do not create new traces (0% of the requests), but continue incoming traces. So envelopes will be created only if there is an incoming trace. + (0, 0), + # traces_sample_rate=1 means create new traces for 100% of requests (and also continue incoming traces, of course). + (1, 2), + ], +) +def test_sampling_traces_sample_rate_0_or_100( + sentry_init, + capture_envelopes, + traces_sample_rate, + expected_num_of_envelopes, +): + kwargs = {} + if traces_sample_rate != USE_DEFAULT_TRACES_SAMPLE_RATE: + kwargs["traces_sample_rate"] = traces_sample_rate + + sentry_init(**kwargs) + + envelopes = capture_envelopes() + + with sentry_sdk.start_span(description="request a"): + with sentry_sdk.start_span(description="cache a"): + with sentry_sdk.start_span(description="db a"): + ... + + with sentry_sdk.start_span(description="request b"): + with sentry_sdk.start_span(description="cache b"): + with sentry_sdk.start_span(description="db b"): + ... + + assert len(envelopes) == expected_num_of_envelopes + + if expected_num_of_envelopes == 2: + (transaction_a, transaction_b) = [ + envelope.items[0].payload.json for envelope in envelopes + ] + + assert transaction_a["transaction"] == "request a" + assert transaction_b["transaction"] == "request b" + + spans_a = transaction_a["spans"] + assert len(spans_a) == 2 + assert spans_a[0]["description"] == "cache a" + assert spans_a[1]["description"] == "db a" + spans_b = transaction_b["spans"] + assert len(spans_b) == 2 + assert spans_b[0]["description"] == "cache b" + assert spans_b[1]["description"] == "db b" + + +def test_sampling_traces_sample_rate_50(sentry_init, capture_envelopes): + sentry_init(traces_sample_rate=0.5) + + envelopes = capture_envelopes() + + with mock.patch( + "sentry_sdk.tracing_utils.Random.uniform", return_value=0.2 + ): # keep (0.2 < 0.5) + with sentry_sdk.start_span(description="request a"): + with sentry_sdk.start_span(description="cache a"): + with sentry_sdk.start_span(description="db a"): + ... + + with mock.patch( + "sentry_sdk.tracing_utils.Random.uniform", return_value=0.7 + ): # drop (0.7 >= 0.5) + with sentry_sdk.start_span(description="request b"): + with sentry_sdk.start_span(description="cache b"): + with sentry_sdk.start_span(description="db b"): + ...
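+ + # Note: a trace is kept when the mocked sample_rand value falls below traces_sample_rate, so 0.2 keeps "request a" while 0.7 drops "request b"; exactly one envelope is expected below.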
+ + assert len(envelopes) == 1 + + (envelope,) = envelopes + transaction = envelope.items[0].payload.json + assert transaction["transaction"] == "request a" + spans = transaction["spans"] + assert len(spans) == 2 + assert spans[0]["description"] == "cache a" + assert spans[1]["description"] == "db a" + + +def test_sampling_traces_sampler(sentry_init, capture_envelopes): + def keep_only_a(sampling_context): + if " a" in sampling_context["transaction_context"]["name"]: + return 1 + else: + return 0 + + sentry_init(traces_sampler=keep_only_a) + + envelopes = capture_envelopes() + + # children inherit from root spans + with sentry_sdk.start_span(description="request a"): # keep + with sentry_sdk.start_span(description="cache a"): + with sentry_sdk.start_span(description="db a"): + ... + + with sentry_sdk.start_span(description="request b"): # drop + with sentry_sdk.start_span(description="cache b"): + with sentry_sdk.start_span(description="db b"): + ... + + with sentry_sdk.start_span(description="request c"): # drop + with sentry_sdk.start_span(description="cache a c"): + with sentry_sdk.start_span(description="db a c"): + ... + + with sentry_sdk.start_span(description="new a c"): # keep + with sentry_sdk.start_span(description="cache c"): + with sentry_sdk.start_span(description="db c"): + ... + + assert len(envelopes) == 2 + (envelope1, envelope2) = envelopes + transaction1 = envelope1.items[0].payload.json + transaction2 = envelope2.items[0].payload.json + + assert transaction1["transaction"] == "request a" + assert len(transaction1["spans"]) == 2 + assert transaction2["transaction"] == "new a c" + assert len(transaction2["spans"]) == 2 + + +def test_sampling_traces_sampler_boolean(sentry_init, capture_envelopes): + def keep_only_a(sampling_context): + if " a" in sampling_context["transaction_context"]["name"]: + return True + else: + return False + + sentry_init( + traces_sample_rate=1.0, + traces_sampler=keep_only_a, + ) + + envelopes = capture_envelopes() + + with sentry_sdk.start_span(description="request a"): # keep + with sentry_sdk.start_span(description="cache a"): + with sentry_sdk.start_span(description="db X"): + ... + + with sentry_sdk.start_span(description="request b"): # drop + with sentry_sdk.start_span(description="cache b"): + with sentry_sdk.start_span(description="db b"): + ... + + assert len(envelopes) == 1 + (envelope,) = envelopes + transaction = envelope.items[0].payload.json + + assert transaction["transaction"] == "request a" + assert len(transaction["spans"]) == 2 + + +@pytest.mark.parametrize( + "traces_sample_rate, expected_num_of_envelopes", + [ + # special case for testing, do not pass any traces_sample_rate to init() (the default traces_sample_rate=None will be used) + (USE_DEFAULT_TRACES_SAMPLE_RATE, 0), + # traces_sample_rate=None means do not create new traces, and also do not continue incoming traces. So, no envelopes at all. + (None, 0), + # traces_sample_rate=0 means do not create new traces (0% of the requests), but continue incoming traces. So envelopes will be created only if there is an incoming trace. + (0, 1), + # traces_sample_rate=1 means create new traces for 100% of requests (and also continue incoming traces, of course). 
+ (1, 1), + ], +) +def test_sampling_parent_sampled( + sentry_init, + traces_sample_rate, + expected_num_of_envelopes, + capture_envelopes, +): + kwargs = {} + if traces_sample_rate != USE_DEFAULT_TRACES_SAMPLE_RATE: + kwargs["traces_sample_rate"] = traces_sample_rate + + sentry_init(**kwargs) + + envelopes = capture_envelopes() + + # The upstream service has sampled the request + headers = { + "sentry-trace": "771a43a4192642f0b136d5159a501700-1234567890abcdef-1", + } + with sentry_sdk.continue_trace(headers): + with sentry_sdk.start_span(description="request a"): + with sentry_sdk.start_span(description="cache a"): + with sentry_sdk.start_span(description="db X"): + ... + + assert len(envelopes) == expected_num_of_envelopes + + if expected_num_of_envelopes == 1: + (envelope,) = envelopes + transaction = envelope.items[0].payload.json + assert transaction["transaction"] == "request a" + assert ( + transaction["contexts"]["trace"]["trace_id"] + == "771a43a4192642f0b136d5159a501700" + ) + assert transaction["contexts"]["trace"]["span_id"] != "1234567890abcdef" + assert transaction["contexts"]["trace"]["parent_span_id"] == "1234567890abcdef" + + +@pytest.mark.parametrize( + "traces_sample_rate, upstream_sampled, expected_num_of_envelopes", + [ + # special case for testing, do not pass any traces_sample_rate to init() (the default traces_sample_rate=None will be used) + (USE_DEFAULT_TRACES_SAMPLE_RATE, 0, 0), + (USE_DEFAULT_TRACES_SAMPLE_RATE, 1, 0), + # traces_sample_rate=None means do not create new traces, and also do not continue incoming traces. So, no envelopes at all. + (None, 0, 0), + (None, 1, 0), + # traces_sample_rate=0 means do not create new traces (0% of the requests), but continue incoming traces. So envelopes will be created only if there is an incoming trace. + (0, 0, 0), + (0, 1, 1), + # traces_sample_rate=1 means create new traces for 100% of requests (and also continue incoming traces, of course). + (1, 0, 0), + (1, 1, 1), + ], +) +def test_sampling_parent_dropped( + sentry_init, + traces_sample_rate, + upstream_sampled, + expected_num_of_envelopes, + capture_envelopes, +): + kwargs = {} + if traces_sample_rate != USE_DEFAULT_TRACES_SAMPLE_RATE: + kwargs["traces_sample_rate"] = traces_sample_rate + + sentry_init(**kwargs) + + envelopes = capture_envelopes() + + # The upstream service's sampling decision (sampled or dropped) is encoded in the last field of the sentry-trace header + headers = { + "sentry-trace": f"771a43a4192642f0b136d5159a501700-1234567890abcdef-{upstream_sampled}", + } + with sentry_sdk.continue_trace(headers): + with sentry_sdk.start_span(description="request a"): + with sentry_sdk.start_span(description="cache a"): + with sentry_sdk.start_span(description="db X"): + ... + + assert len(envelopes) == expected_num_of_envelopes + + if expected_num_of_envelopes == 1: + (envelope,) = envelopes + transaction = envelope.items[0].payload.json + assert transaction["transaction"] == "request a" + assert ( + transaction["contexts"]["trace"]["trace_id"] + == "771a43a4192642f0b136d5159a501700" + ) + assert transaction["contexts"]["trace"]["span_id"] != "1234567890abcdef" + assert transaction["contexts"]["trace"]["parent_span_id"] == "1234567890abcdef" + + +@pytest.mark.parametrize( + "traces_sample_rate, expected_num_of_envelopes", + [ + # special case for testing, do not pass any traces_sample_rate to init() (the default traces_sample_rate=None will be used) + (USE_DEFAULT_TRACES_SAMPLE_RATE, 0), + # traces_sample_rate=None means do not create new traces, and also do not continue incoming traces. So, no envelopes at all.
+ (None, 0), + # traces_sample_rate=0 means do not create new traces (0% of the requests), but continue incoming traces. So envelopes will be created only if there is an incoming trace. + (0, 0), + # traces_sample_rate=1 means create new traces for 100% of requests (and also continue incoming traces, of course). + (1, 1), + ], +) +def test_sampling_parent_deferred( + sentry_init, + traces_sample_rate, + expected_num_of_envelopes, + capture_envelopes, +): + kwargs = {} + if traces_sample_rate != USE_DEFAULT_TRACES_SAMPLE_RATE: + kwargs["traces_sample_rate"] = traces_sample_rate + + sentry_init(**kwargs) + + envelopes = capture_envelopes() + + # The upstream service has deferred the sampling decision to us. + headers = { + "sentry-trace": "771a43a4192642f0b136d5159a501700-1234567890abcdef-", + } + + with sentry_sdk.continue_trace(headers): + with sentry_sdk.start_span(description="request a"): + with sentry_sdk.start_span(description="cache a"): + with sentry_sdk.start_span(description="db X"): + ... + + assert len(envelopes) == expected_num_of_envelopes + + if expected_num_of_envelopes == 1: + (envelope,) = envelopes + transaction = envelope.items[0].payload.json + assert transaction["transaction"] == "request a" + assert ( + transaction["contexts"]["trace"]["trace_id"] + == "771a43a4192642f0b136d5159a501700" + ) + assert transaction["contexts"]["trace"]["span_id"] != "1234567890abcdef" + assert transaction["contexts"]["trace"]["parent_span_id"] == "1234567890abcdef" diff --git a/tests/opentelemetry/test_span_processor.py b/tests/opentelemetry/test_span_processor.py new file mode 100644 index 0000000000..7d6283d4ea --- /dev/null +++ b/tests/opentelemetry/test_span_processor.py @@ -0,0 +1,19 @@ +import sentry_sdk + + +def test_span_processor_omits_underscore_attributes(sentry_init, capture_events): + sentry_init(traces_sample_rate=1.0) + + events = capture_events() + + with sentry_sdk.start_span(): + with sentry_sdk.start_span() as span: + span.set_attribute("_internal", 47) + span.set_attribute("noninternal", 23) + + assert span._otel_span.attributes["_internal"] == 47 + assert span._otel_span.attributes["noninternal"] == 23 + + outgoing_span = events[0]["spans"][0] + assert "_internal" not in outgoing_span["data"] + assert "noninternal" in outgoing_span["data"] diff --git a/tests/opentelemetry/test_utils.py b/tests/opentelemetry/test_utils.py new file mode 100644 index 0000000000..6313e6ccc1 --- /dev/null +++ b/tests/opentelemetry/test_utils.py @@ -0,0 +1,363 @@ +from unittest.mock import MagicMock + +import pytest +from opentelemetry.trace import SpanKind, Status, StatusCode +from opentelemetry.version import __version__ as OTEL_VERSION + +from sentry_sdk.opentelemetry.utils import ( + extract_span_data, + extract_span_status, +) +from sentry_sdk.utils import parse_version + +OTEL_VERSION = parse_version(OTEL_VERSION) + + +@pytest.mark.parametrize( + "name, status, attributes, expected", + [ + ( + "OTel Span Blank", + Status(StatusCode.UNSET), + {}, + { + "op": None, + "description": "OTel Span Blank", + "status": None, + "http_status_code": None, + "origin": None, + }, + ), + ( + "OTel Span RPC", + Status(StatusCode.UNSET), + { + "rpc.service": "myservice.EchoService", + }, + { + "op": "rpc", + "description": "OTel Span RPC", + "status": None, + "http_status_code": None, + "origin": None, + }, + ), + ( + "OTel Span Messaging", + Status(StatusCode.UNSET), + { + "messaging.system": "rabbitmq", + }, + { + "op": "message", + "description": "OTel Span Messaging", + "status": None, + 
"http_status_code": None, + "origin": None, + }, + ), + ( + "OTel Span FaaS", + Status(StatusCode.UNSET), + { + "faas.trigger": "pubsub", + }, + { + "op": "pubsub", + "description": "OTel Span FaaS", + "status": None, + "http_status_code": None, + "origin": None, + }, + ), + ], +) +def test_extract_span_data(name, status, attributes, expected): + otel_span = MagicMock() + otel_span.name = name + otel_span.status = Status(StatusCode.UNSET) + otel_span.attributes = attributes + + span_data = extract_span_data(otel_span) + result = { + "op": span_data.op, + "description": span_data.description, + "status": span_data.status, + "http_status_code": span_data.http_status, + "origin": span_data.origin, + } + assert result == expected + + +@pytest.mark.parametrize( + "kind, status, attributes, expected", + [ + ( + SpanKind.CLIENT, + Status(StatusCode.OK), + { + "http.method": "GET", + "http.target": None, # no location for description + "net.peer.name": None, + "http.url": None, + }, + { + "op": "http.client", + "description": "GET", + "status": "ok", + "http_status_code": None, + "origin": None, + }, + ), + ( + SpanKind.CLIENT, + Status(StatusCode.OK), + { + "http.method": "GET", + "http.target": "/target", # this can be the location in the description + }, + { + "op": "http.client", + "description": "GET /target", + "status": "ok", + "http_status_code": None, + "origin": None, + }, + ), + ( + SpanKind.CLIENT, + Status(StatusCode.OK), + { + "http.method": "GET", + "net.peer.name": "example.com", # this can be the location in the description + }, + { + "op": "http.client", + "description": "GET example.com", + "status": "ok", + "http_status_code": None, + "origin": None, + }, + ), + ( + SpanKind.CLIENT, + Status(StatusCode.OK), + { + "http.method": "GET", + "http.target": "/target", # target takes precedence over net.peer.name + "net.peer.name": "example.com", + }, + { + "op": "http.client", + "description": "GET /target", + "status": "ok", + "http_status_code": None, + "origin": None, + }, + ), + ( + SpanKind.CLIENT, + Status(StatusCode.OK), + { + "http.method": "GET", + "http.url": "https://username:secretpwd@example.com/bla/?secret=123&anothersecret=456", # sensitive data is stripped + }, + { + "op": "http.client", + "description": "GET https://example.com/bla/", + "status": "ok", + "http_status_code": None, + "origin": None, + }, + ), + ], +) +def test_span_data_for_http_method(kind, status, attributes, expected): + otel_span = MagicMock() + otel_span.kind = kind + otel_span.status = status + otel_span.attributes = attributes + + span_data = extract_span_data(otel_span) + + result = { + "op": span_data.op, + "description": span_data.description, + "status": span_data.status, + "http_status_code": span_data.http_status, + "origin": span_data.origin, + } + assert result == expected + + +def test_span_data_for_db_query(): + otel_span = MagicMock() + otel_span.name = "OTel Span" + otel_span.attributes = {} + otel_span.status = Status(StatusCode.UNSET) + + span_data = extract_span_data(otel_span) + assert span_data.op is None + assert span_data.description == "OTel Span" + assert span_data.status is None + assert span_data.http_status is None + assert span_data.origin is None + + otel_span.attributes = { + "db.system": "mysql", + "db.statement": "SELECT * FROM table;", + } + + span_data = extract_span_data(otel_span) + assert span_data.op == "db" + assert span_data.description == "SELECT * FROM table;" + assert span_data.status is None + assert span_data.http_status is None + assert span_data.origin 
is None + + +@pytest.mark.parametrize( + "kind, status, attributes, expected", + [ + ( + SpanKind.SERVER, + Status(StatusCode.UNSET), + { + "http.method": "POST", + "http.route": "/some/route", + }, + { + "status": None, + "http_status_code": None, + }, + ), + ( + SpanKind.SERVER, + Status(StatusCode.UNSET), + { + "http.method": "POST", + "http.route": "/some/route", + "http.status_code": 502, # Take this status in case of UNSET status + }, + { + "status": "internal_error", + "http_status_code": 502, + }, + ), + ( + SpanKind.SERVER, + Status(StatusCode.UNSET), + { + "http.method": "POST", + "http.route": "/some/route", + "http.status_code": 502, + "http.response.status_code": 503, # this takes precedence over deprecated http.status_code + }, + { + "status": "unavailable", + "http_status_code": 503, + # old otel versions won't take the new attribute into account + "status_old": "internal_error", + "http_status_code_old": 502, + }, + ), + ( + SpanKind.SERVER, + Status(StatusCode.OK), # OK status is taken right away + { + "http.method": "POST", + "http.route": "/some/route", + }, + { + "status": "ok", + "http_status_code": None, + }, + ), + ( + SpanKind.SERVER, + Status(StatusCode.OK), # OK status is taken right away + { + "http.method": "POST", + "http.route": "/some/route", + "http.response.status_code": 200, + "http.status_code": 200, + }, + { + "status": "ok", + "http_status_code": 200, + }, + ), + ( + SpanKind.SERVER, + Status( + StatusCode.ERROR + ), # Error status without description gets the http status from attributes + { + "http.method": "POST", + "http.route": "/some/route", + "http.response.status_code": 401, + "http.status_code": 401, + }, + { + "status": "unauthenticated", + "http_status_code": 401, + }, + ), + ( + SpanKind.SERVER, + Status(StatusCode.ERROR, "I'm a teapot"), + { + "http.method": "POST", + "http.route": "/some/route", + "http.response.status_code": 418, + "http.status_code": 418, + }, + { + "status": "invalid_argument", + "http_status_code": 418, + }, + ), + ( + SpanKind.SERVER, + Status( + StatusCode.ERROR, "unimplemented" + ), # Error status with known description is taken (grpc errors) + { + "http.method": "POST", + "http.route": "/some/route", + }, + { + "status": "unimplemented", + "http_status_code": None, + }, + ), + ], +) +def test_extract_span_status(kind, status, attributes, expected): + otel_span = MagicMock() + otel_span.kind = kind + otel_span.status = status + otel_span.attributes = attributes + + status, http_status_code = extract_span_status(otel_span) + result = { + "status": status, + "http_status_code": http_status_code, + } + + if ( + OTEL_VERSION < (1, 21) + and "status_old" in expected + and "http_status_code_old" in expected + ): + expected = { + "status": expected["status_old"], + "http_status_code": expected["http_status_code_old"], + } + else: + expected = { + "status": expected["status"], + "http_status_code": expected["http_status_code"], + } + + assert result == expected diff --git a/tests/profiler/test_continuous_profiler.py b/tests/profiler/test_continuous_profiler.py index 7283ec7164..3f50e679d1 100644 --- a/tests/profiler/test_continuous_profiler.py +++ b/tests/profiler/test_continuous_profiler.py @@ -11,9 +11,7 @@ get_profiler_id, setup_continuous_profiler, start_profiler, - start_profile_session, stop_profiler, - stop_profile_session, ) from tests.conftest import ApproxDict @@ -26,25 +24,16 @@ requires_gevent = pytest.mark.skipif(gevent is None, reason="gevent not enabled") -def get_client_options(use_top_level_profiler_mode): 
+def get_client_options(): def client_options( mode=None, auto_start=None, profile_session_sample_rate=1.0, lifecycle="manual" ): - if use_top_level_profiler_mode: - return { - "profile_lifecycle": lifecycle, - "profiler_mode": mode, - "profile_session_sample_rate": profile_session_sample_rate, - "_experiments": { - "continuous_profiling_auto_start": auto_start, - }, - } return { "profile_lifecycle": lifecycle, + "profiler_mode": mode, "profile_session_sample_rate": profile_session_sample_rate, "_experiments": { "continuous_profiling_auto_start": auto_start, - "continuous_profiling_mode": mode, }, } @@ -62,8 +51,7 @@ def client_options( @pytest.mark.parametrize( "make_options", [ - pytest.param(get_client_options(True), id="non-experiment"), - pytest.param(get_client_options(False), id="experiment"), + pytest.param(get_client_options()), ], ) def test_continuous_profiler_invalid_mode(mode, make_options, teardown_profiling): @@ -85,8 +73,7 @@ def test_continuous_profiler_invalid_mode(mode, make_options, teardown_profiling @pytest.mark.parametrize( "make_options", [ - pytest.param(get_client_options(True), id="non-experiment"), - pytest.param(get_client_options(False), id="experiment"), + pytest.param(get_client_options()), ], ) def test_continuous_profiler_valid_mode(mode, make_options, teardown_profiling): @@ -108,8 +95,7 @@ def test_continuous_profiler_valid_mode(mode, make_options, teardown_profiling): @pytest.mark.parametrize( "make_options", [ - pytest.param(get_client_options(True), id="non-experiment"), - pytest.param(get_client_options(False), id="experiment"), + pytest.param(get_client_options()), ], ) def test_continuous_profiler_setup_twice(mode, make_options, teardown_profiling): @@ -214,26 +200,10 @@ def assert_single_transaction_without_profile_chunks(envelopes): pytest.param("gevent", marks=requires_gevent), ], ) -@pytest.mark.parametrize( - ["start_profiler_func", "stop_profiler_func"], - [ - pytest.param( - start_profile_session, - stop_profile_session, - id="start_profile_session/stop_profile_session (deprecated)", - ), - pytest.param( - start_profiler, - stop_profiler, - id="start_profiler/stop_profiler", - ), - ], -) @pytest.mark.parametrize( "make_options", [ - pytest.param(get_client_options(True), id="non-experiment"), - pytest.param(get_client_options(False), id="experiment"), + pytest.param(get_client_options()), ], ) @mock.patch("sentry_sdk.profiler.continuous_profiler.PROFILE_BUFFER_SECONDS", 0.01) @@ -241,8 +211,6 @@ def test_continuous_profiler_auto_start_and_manual_stop( sentry_init, capture_envelopes, mode, - start_profiler_func, - stop_profiler_func, make_options, teardown_profiling, ): @@ -256,30 +224,30 @@ def test_continuous_profiler_auto_start_and_manual_stop( thread = threading.current_thread() - with sentry_sdk.start_transaction(name="profiling"): + with sentry_sdk.start_span(name="profiling"): with sentry_sdk.start_span(op="op"): - time.sleep(0.05) + time.sleep(0.1) assert_single_transaction_with_profile_chunks(envelopes, thread) for _ in range(3): - stop_profiler_func() + stop_profiler() envelopes.clear() - with sentry_sdk.start_transaction(name="profiling"): + with sentry_sdk.start_span(name="profiling"): with sentry_sdk.start_span(op="op"): - time.sleep(0.05) + time.sleep(0.1) assert_single_transaction_without_profile_chunks(envelopes) - start_profiler_func() + start_profiler() envelopes.clear() - with sentry_sdk.start_transaction(name="profiling"): + with sentry_sdk.start_span(name="profiling"): with sentry_sdk.start_span(op="op"): - 
time.sleep(0.05) + time.sleep(0.1) assert_single_transaction_with_profile_chunks(envelopes, thread) @@ -291,26 +259,10 @@ def test_continuous_profiler_auto_start_and_manual_stop( pytest.param("gevent", marks=requires_gevent), ], ) -@pytest.mark.parametrize( - ["start_profiler_func", "stop_profiler_func"], - [ - pytest.param( - start_profile_session, - stop_profile_session, - id="start_profile_session/stop_profile_session (deprecated)", - ), - pytest.param( - start_profiler, - stop_profiler, - id="start_profiler/stop_profiler", - ), - ], -) @pytest.mark.parametrize( "make_options", [ - pytest.param(get_client_options(True), id="non-experiment"), - pytest.param(get_client_options(False), id="experiment"), + pytest.param(get_client_options()), ], ) @mock.patch("sentry_sdk.profiler.continuous_profiler.PROFILE_BUFFER_SECONDS", 0.01) @@ -318,8 +270,6 @@ def test_continuous_profiler_manual_start_and_stop_sampled( sentry_init, capture_envelopes, mode, - start_profiler_func, - stop_profiler_func, make_options, teardown_profiling, ): @@ -336,11 +286,11 @@ def test_continuous_profiler_manual_start_and_stop_sampled( thread = threading.current_thread() for _ in range(3): - start_profiler_func() + start_profiler() envelopes.clear() - with sentry_sdk.start_transaction(name="profiling"): + with sentry_sdk.start_span(name="profiling"): assert get_profiler_id() is not None, "profiler should be running" with sentry_sdk.start_span(op="op"): time.sleep(0.1) @@ -350,14 +300,14 @@ def test_continuous_profiler_manual_start_and_stop_sampled( assert get_profiler_id() is not None, "profiler should be running" - stop_profiler_func() + stop_profiler() # the profiler stops immediately in manual mode assert get_profiler_id() is None, "profiler should not be running" envelopes.clear() - with sentry_sdk.start_transaction(name="profiling"): + with sentry_sdk.start_span(name="profiling"): assert get_profiler_id() is None, "profiler should not be running" with sentry_sdk.start_span(op="op"): time.sleep(0.1) @@ -373,34 +323,16 @@ def test_continuous_profiler_manual_start_and_stop_sampled( pytest.param("gevent", marks=requires_gevent), ], ) -@pytest.mark.parametrize( - ["start_profiler_func", "stop_profiler_func"], - [ - pytest.param( - start_profile_session, - stop_profile_session, - id="start_profile_session/stop_profile_session (deprecated)", - ), - pytest.param( - start_profiler, - stop_profiler, - id="start_profiler/stop_profiler", - ), - ], -) @pytest.mark.parametrize( "make_options", [ - pytest.param(get_client_options(True), id="non-experiment"), - pytest.param(get_client_options(False), id="experiment"), + pytest.param(get_client_options()), ], ) def test_continuous_profiler_manual_start_and_stop_unsampled( sentry_init, capture_envelopes, mode, - start_profiler_func, - stop_profiler_func, make_options, teardown_profiling, ): @@ -414,15 +346,15 @@ def test_continuous_profiler_manual_start_and_stop_unsampled( envelopes = capture_envelopes() - start_profiler_func() + start_profiler() - with sentry_sdk.start_transaction(name="profiling"): + with sentry_sdk.start_span(name="profiling"): with sentry_sdk.start_span(op="op"): time.sleep(0.05) assert_single_transaction_without_profile_chunks(envelopes) - stop_profiler_func() + stop_profiler() @pytest.mark.parametrize( @@ -435,8 +367,7 @@ def test_continuous_profiler_manual_start_and_stop_unsampled( @pytest.mark.parametrize( "make_options", [ - pytest.param(get_client_options(True), id="non-experiment"), - pytest.param(get_client_options(False), id="experiment"), + 
pytest.param(get_client_options()), ], ) @mock.patch("sentry_sdk.profiler.continuous_profiler.DEFAULT_SAMPLING_FREQUENCY", 21) @@ -466,10 +397,12 @@ def test_continuous_profiler_auto_start_and_stop_sampled( profiler_ids = set() - with sentry_sdk.start_transaction(name="profiling 1"): + with sentry_sdk.start_span(name="profiling 1"): + assert get_profiler_id() is not None, "profiler should be running" profiler_id = get_profiler_id() assert profiler_id is not None, "profiler should be running" profiler_ids.add(profiler_id) + with sentry_sdk.start_span(op="op"): time.sleep(0.1) profiler_id = get_profiler_id() @@ -484,7 +417,7 @@ def test_continuous_profiler_auto_start_and_stop_sampled( assert profiler_id is not None, "profiler should be running" profiler_ids.add(profiler_id) - with sentry_sdk.start_transaction(name="profiling 2"): + with sentry_sdk.start_span(name="profiling 2"): profiler_id = get_profiler_id() assert profiler_id is not None, "profiler should be running" profiler_ids.add(profiler_id) @@ -518,8 +451,7 @@ def test_continuous_profiler_auto_start_and_stop_sampled( @pytest.mark.parametrize( "make_options", [ - pytest.param(get_client_options(True), id="non-experiment"), - pytest.param(get_client_options(False), id="experiment"), + pytest.param(get_client_options()), ], ) @mock.patch("sentry_sdk.profiler.continuous_profiler.PROFILE_BUFFER_SECONDS", 0.01) @@ -543,7 +475,7 @@ def test_continuous_profiler_auto_start_and_stop_unsampled( for _ in range(3): envelopes.clear() - with sentry_sdk.start_transaction(name="profiling"): + with sentry_sdk.start_span(name="profiling"): assert get_profiler_id() is None, "profiler should not be running" with sentry_sdk.start_span(op="op"): time.sleep(0.05) @@ -564,33 +496,15 @@ def test_continuous_profiler_auto_start_and_stop_unsampled( ), ], ) -@pytest.mark.parametrize( - ["start_profiler_func", "stop_profiler_func"], - [ - pytest.param( - start_profile_session, - stop_profile_session, - id="start_profile_session/stop_profile_session (deprecated)", - ), - pytest.param( - start_profiler, - stop_profiler, - id="start_profiler/stop_profiler", - ), - ], -) @pytest.mark.parametrize( "make_options", [ - pytest.param(get_client_options(True), id="non-experiment"), - pytest.param(get_client_options(False), id="experiment"), + pytest.param(get_client_options()), ], ) def test_continuous_profiler_manual_start_and_stop_noop_when_using_trace_lifecyle( sentry_init, mode, - start_profiler_func, - stop_profiler_func, class_name, make_options, teardown_profiling, @@ -606,11 +520,11 @@ def test_continuous_profiler_manual_start_and_stop_noop_when_using_trace_lifecyl with mock.patch( f"sentry_sdk.profiler.continuous_profiler.{class_name}.ensure_running" ) as mock_ensure_running: - start_profiler_func() + start_profiler() mock_ensure_running.assert_not_called() with mock.patch( f"sentry_sdk.profiler.continuous_profiler.{class_name}.teardown" ) as mock_teardown: - stop_profiler_func() + stop_profiler() mock_teardown.assert_not_called() diff --git a/tests/profiler/test_transaction_profiler.py b/tests/profiler/test_transaction_profiler.py index 142fd7d78c..0dc4e82af5 100644 --- a/tests/profiler/test_transaction_profiler.py +++ b/tests/profiler/test_transaction_profiler.py @@ -1,6 +1,5 @@ import inspect import os -import sentry_sdk import sys import threading import time @@ -10,7 +9,7 @@ import pytest -from sentry_sdk import start_transaction +from sentry_sdk import start_span from sentry_sdk.profiler.transaction_profiler import ( GeventScheduler, Profile, @@ -40,30 
+39,13 @@ def process_test_sample(sample): return [(tid, (stack, stack)) for tid, stack in sample] -def non_experimental_options(mode=None, sample_rate=None): - return {"profiler_mode": mode, "profiles_sample_rate": sample_rate} - - -def experimental_options(mode=None, sample_rate=None): - return { - "_experiments": {"profiler_mode": mode, "profiles_sample_rate": sample_rate} - } - - @pytest.mark.parametrize( "mode", [pytest.param("foo")], ) -@pytest.mark.parametrize( - "make_options", - [ - pytest.param(experimental_options, id="experiment"), - pytest.param(non_experimental_options, id="non experimental"), - ], -) -def test_profiler_invalid_mode(mode, make_options, teardown_profiling): +def test_profiler_invalid_mode(mode, teardown_profiling): with pytest.raises(ValueError): - setup_profiler(make_options(mode)) + setup_profiler({"profiler_mode": mode}) @pytest.mark.parametrize( @@ -74,30 +56,16 @@ def test_profiler_invalid_mode(mode, make_options, teardown_profiling): pytest.param("gevent", marks=requires_gevent), ], ) -@pytest.mark.parametrize( - "make_options", - [ - pytest.param(experimental_options, id="experiment"), - pytest.param(non_experimental_options, id="non experimental"), - ], -) -def test_profiler_valid_mode(mode, make_options, teardown_profiling): +def test_profiler_valid_mode(mode, teardown_profiling): # should not raise any exceptions - setup_profiler(make_options(mode)) + setup_profiler({"profiler_mode": mode}) -@pytest.mark.parametrize( - "make_options", - [ - pytest.param(experimental_options, id="experiment"), - pytest.param(non_experimental_options, id="non experimental"), - ], -) -def test_profiler_setup_twice(make_options, teardown_profiling): +def test_profiler_setup_twice(teardown_profiling): # setting up the first time should return True to indicate success - assert setup_profiler(make_options()) + assert setup_profiler({}) # setting up the second time should return False to indicate no-op - assert not setup_profiler(make_options()) + assert not setup_profiler({}) @pytest.mark.parametrize( @@ -117,13 +85,6 @@ def test_profiler_setup_twice(make_options, teardown_profiling): pytest.param(None, 0, id="profiler not enabled"), ], ) -@pytest.mark.parametrize( - "make_options", - [ - pytest.param(experimental_options, id="experiment"), - pytest.param(non_experimental_options, id="non experimental"), - ], -) @mock.patch("sentry_sdk.profiler.transaction_profiler.PROFILE_MINIMUM_SAMPLES", 0) def test_profiles_sample_rate( sentry_init, @@ -132,15 +93,12 @@ def test_profiles_sample_rate( teardown_profiling, profiles_sample_rate, profile_count, - make_options, mode, ): - options = make_options(mode=mode, sample_rate=profiles_sample_rate) sentry_init( traces_sample_rate=1.0, - profiler_mode=options.get("profiler_mode"), - profiles_sample_rate=options.get("profiles_sample_rate"), - _experiments=options.get("_experiments", {}), + profiler_mode=mode, + profiles_sample_rate=profiles_sample_rate, ) envelopes = capture_envelopes() @@ -149,7 +107,7 @@ def test_profiles_sample_rate( with mock.patch( "sentry_sdk.profiler.transaction_profiler.random.random", return_value=0.5 ): - with start_transaction(name="profiling"): + with start_span(name="profiling"): pass items = defaultdict(list) @@ -212,6 +170,7 @@ def test_profiles_sampler( sentry_init( traces_sample_rate=1.0, profiles_sampler=profiles_sampler, + profiler_mode=mode, ) envelopes = capture_envelopes() @@ -220,7 +179,7 @@ def test_profiles_sampler( with mock.patch( "sentry_sdk.profiler.transaction_profiler.random.random", 
return_value=0.5 ): - with start_transaction(name="profiling"): + with start_span(name="profiling"): pass items = defaultdict(list) @@ -244,13 +203,13 @@ def test_minimum_unique_samples_required( ): sentry_init( traces_sample_rate=1.0, - _experiments={"profiles_sample_rate": 1.0}, + profiles_sample_rate=1.0, ) envelopes = capture_envelopes() record_lost_event_calls = capture_record_lost_event_calls() - with start_transaction(name="profiling"): + with start_span(name="profiling"): pass items = defaultdict(list) @@ -273,12 +232,12 @@ def test_profile_captured( ): sentry_init( traces_sample_rate=1.0, - _experiments={"profiles_sample_rate": 1.0}, + profiles_sample_rate=1.0, ) envelopes = capture_envelopes() - with start_transaction(name="profiling"): + with start_span(name="profiling"): time.sleep(0.05) items = defaultdict(list) @@ -664,16 +623,13 @@ def test_max_profile_duration_reached(scheduler_class): class NoopScheduler(Scheduler): - def setup(self): - # type: () -> None + def setup(self) -> None: pass - def teardown(self): - # type: () -> None + def teardown(self) -> None: pass - def ensure_running(self): - # type: () -> None + def ensure_running(self) -> None: pass @@ -817,24 +773,6 @@ def test_profile_processing( assert processed["samples"] == expected["samples"] -def test_hub_backwards_compatibility(suppress_deprecation_warnings): - hub = sentry_sdk.Hub() - - with pytest.warns(DeprecationWarning): - profile = Profile(True, 0, hub=hub) - - with pytest.warns(DeprecationWarning): - assert profile.hub is hub - - new_hub = sentry_sdk.Hub() - - with pytest.warns(DeprecationWarning): - profile.hub = new_hub - - with pytest.warns(DeprecationWarning): - assert profile.hub is new_hub - - def test_no_warning_without_hub(): with warnings.catch_warnings(): warnings.simplefilter("error") diff --git a/tests/test_ai_monitoring.py b/tests/test_ai_monitoring.py index ee757f82cd..05135b97d5 100644 --- a/tests/test_ai_monitoring.py +++ b/tests/test_ai_monitoring.py @@ -16,7 +16,7 @@ def tool(**kwargs): def pipeline(): tool() - with sentry_sdk.start_transaction(): + with sentry_sdk.start_span(name="pipeline"): pipeline() transaction = events[0] @@ -43,7 +43,7 @@ def tool(**kwargs): def pipeline(): tool() - with sentry_sdk.start_transaction(): + with sentry_sdk.start_span(name="pipeline"): pipeline(sentry_tags={"user": "colin"}, sentry_data={"some_data": "value"}) transaction = events[0] @@ -74,7 +74,7 @@ async def async_tool(**kwargs): async def async_pipeline(): await async_tool() - with sentry_sdk.start_transaction(): + with sentry_sdk.start_span(name="async_pipeline"): await async_pipeline() transaction = events[0] @@ -102,7 +102,7 @@ async def async_tool(**kwargs): async def async_pipeline(): await async_tool() - with sentry_sdk.start_transaction(): + with sentry_sdk.start_span(name="async_pipeline"): await async_pipeline( sentry_tags={"user": "czyber"}, sentry_data={"some_data": "value"} ) diff --git a/tests/test_api.py b/tests/test_api.py index acc33cdf4c..c2aa3ae71f 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -1,66 +1,43 @@ import pytest -import re from unittest import mock -import sentry_sdk from sentry_sdk import ( capture_exception, continue_trace, + new_trace, get_baggage, get_client, get_current_span, get_traceparent, is_initialized, - start_transaction, + start_span, set_tags, - configure_scope, - push_scope, get_global_scope, - get_current_scope, get_isolation_scope, + set_tag, ) from sentry_sdk.client import Client, NonRecordingClient +from tests.conftest import SortedBaggage 
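The tests/test_api.py hunks that follow exercise the span-first API this patch migrates to: start_span() now opens the root span where start_transaction() used to, and get_current_span() resolves the innermost active span. A minimal sketch of the pattern, assuming an SDK initialized with tracing enabled (the DSN and span names are illustrative, not taken from this patch):

    import sentry_sdk

    sentry_sdk.init(dsn="https://key@example.ingest.sentry.io/0", traces_sample_rate=1.0)  # illustrative DSN

    # With no span active, start_span(name=...) opens the root span; nested
    # calls open child spans on the current scope.
    with sentry_sdk.start_span(name="checkout") as root_span:
        # The innermost active span is what get_current_span() returns.
        assert sentry_sdk.get_current_span() == root_span
        with sentry_sdk.start_span(op="db", name="load cart"):
            pass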
-def test_get_current_span(): - fake_scope = mock.MagicMock() - fake_scope.span = mock.MagicMock() - assert get_current_span(fake_scope) == fake_scope.span - - fake_scope.span = None - assert get_current_span(fake_scope) is None - - -def test_get_current_span_default_hub(sentry_init): - sentry_init() - - assert get_current_span() is None - - scope = get_current_scope() - fake_span = mock.MagicMock() - scope.span = fake_span - - assert get_current_span() == fake_span - - -def test_get_current_span_default_hub_with_transaction(sentry_init): +def test_get_current_span_current_scope_with_span(sentry_init): sentry_init() assert get_current_span() is None - with start_transaction() as new_transaction: - assert get_current_span() == new_transaction + with start_span() as new_span: + assert get_current_span() == new_span def test_traceparent_with_tracing_enabled(sentry_init): sentry_init(traces_sample_rate=1.0) - with start_transaction() as transaction: + with start_span() as span: expected_traceparent = "%s-%s-1" % ( - transaction.trace_id, - transaction.span_id, + span.trace_id, + span.span_id, ) assert get_traceparent() == expected_traceparent @@ -84,42 +61,78 @@ def test_baggage_with_tracing_disabled(sentry_init): propagation_context.trace_id ) ) - assert get_baggage() == expected_baggage + assert get_baggage() == SortedBaggage(expected_baggage) def test_baggage_with_tracing_enabled(sentry_init): sentry_init(traces_sample_rate=1.0, release="1.0.0", environment="dev") - with start_transaction() as transaction: - expected_baggage_re = r"^sentry-trace_id={},sentry-sample_rand=0\.\d{{6}},sentry-environment=dev,sentry-release=1\.0\.0,sentry-sample_rate=1\.0,sentry-sampled={}$".format( - transaction.trace_id, "true" if transaction.sampled else "false" - ) - assert re.match(expected_baggage_re, get_baggage()) + with mock.patch("sentry_sdk.tracing_utils.Random.uniform", return_value=0.111111): + with start_span(name="foo") as span: + expected_baggage = f"sentry-transaction=foo,sentry-trace_id={span.trace_id},sentry-sample_rand=0.111111,sentry-environment=dev,sentry-release=1.0.0,sentry-sample_rate=1.0,sentry-sampled=true" # noqa: E231 + assert get_baggage() == SortedBaggage(expected_baggage) def test_continue_trace(sentry_init): - sentry_init() + sentry_init(traces_sample_rate=1.0) trace_id = "471a43a4192642f0b136d5159a501701" parent_span_id = "6e8f22c393e68f19" parent_sampled = 1 - transaction = continue_trace( + + with continue_trace( { "sentry-trace": "{}-{}-{}".format(trace_id, parent_span_id, parent_sampled), - "baggage": "sentry-trace_id=566e3688a61d4bc888951642d6f14a19,sentry-sample_rand=0.123456", + "baggage": "sentry-trace_id=566e3688a61d4bc888951642d6f14a19,sentry-sample_rand=0.123456", # noqa: E231 }, - name="some name", - ) - with start_transaction(transaction): - assert transaction.name == "some name" - - propagation_context = get_isolation_scope()._propagation_context - assert propagation_context.trace_id == transaction.trace_id == trace_id - assert propagation_context.parent_span_id == parent_span_id - assert propagation_context.parent_sampled == parent_sampled - assert propagation_context.dynamic_sampling_context == { - "trace_id": "566e3688a61d4bc888951642d6f14a19", - "sample_rand": "0.123456", - } + ): + with start_span(name="some name") as span: + assert span.name == "some name" + propagation_context = get_isolation_scope()._propagation_context + assert propagation_context.trace_id == span.trace_id == trace_id + assert propagation_context.parent_span_id == parent_span_id + assert 
propagation_context.parent_sampled == parent_sampled + assert propagation_context.dynamic_sampling_context == { + "trace_id": "566e3688a61d4bc888951642d6f14a19", + "sample_rand": "0.123456", + } + + +def test_continue_trace_without_headers_starts_new_trace(sentry_init, capture_events): + sentry_init(traces_sample_rate=1.0) + events = capture_events() + + with start_span(name="parent"): + with start_span(name="child"): + with continue_trace({}): + with start_span(name="parent2"): + with start_span(name="child2"): + pass + + assert len(events) == 2 + (tx1, tx2) = events + assert tx1["transaction"] == "parent2" + assert tx1["spans"][0]["description"] == "child2" + assert tx2["transaction"] == "parent" + assert tx2["spans"][0]["description"] == "child" + + +def test_new_trace(sentry_init, capture_events): + sentry_init(traces_sample_rate=1.0) + events = capture_events() + + with start_span(name="parent"): + with start_span(name="child"): + with new_trace(): + with start_span(name="parent2"): + with start_span(name="child2"): + pass + + assert len(events) == 2 + (tx1, tx2) = events + assert tx1["transaction"] == "parent2" + assert tx1["spans"][0]["description"] == "child2" + assert tx2["transaction"] == "parent" + assert tx2["spans"][0]["description"] == "child" def test_is_initialized(): @@ -179,29 +192,52 @@ def test_set_tags(sentry_init, capture_events): }, "Updating tags with empty dict changed tags" -def test_configure_scope_deprecation(): - with pytest.warns(DeprecationWarning): - with configure_scope(): - ... +@pytest.mark.parametrize( + ("key", "value", "expected"), + [ + ("int", 123, "123"), + ("float", 123.456, "123.456"), + ("bool", True, "True"), + ("none", None, "None"), + ("list", [1, 2, 3], "[1, 2, 3]"), + ], +) +def test_set_tag_converts_to_string(sentry_init, capture_events, key, value, expected): + """Test that the api.set_tag function converts values to strings.""" + sentry_init() + events = capture_events() + set_tag(key, value) + raise_and_capture() -def test_push_scope_deprecation(): - with pytest.warns(DeprecationWarning): - with push_scope(): - ... + (event,) = events + tags = event.get("tags", {}) + assert tags[key] == expected -def test_init_context_manager_deprecation(): - with pytest.warns(DeprecationWarning): - with sentry_sdk.init(): - ... 
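The continue_trace() and new_trace() tests above pin down the context-manager form of trace propagation. A short sketch of both patterns under the same assumptions; the header values mirror the shapes used in these tests and are illustrative:

    import sentry_sdk

    sentry_sdk.init(dsn="https://key@example.ingest.sentry.io/0", traces_sample_rate=1.0)  # illustrative DSN

    incoming_http_headers = {
        # "<trace_id>-<parent_span_id>-<sampled>", as in test_continue_trace
        "sentry-trace": "471a43a4192642f0b136d5159a501701-6e8f22c393e68f19-1",
        "baggage": "sentry-trace_id=471a43a4192642f0b136d5159a501701",
    }

    # Spans opened inside continue_trace() join the incoming trace.
    with sentry_sdk.continue_trace(incoming_http_headers):
        with sentry_sdk.start_span(name="handle request"):
            pass

    # new_trace() does the opposite: spans opened inside it get a fresh
    # trace_id even while an outer span is still active.
    with sentry_sdk.start_span(name="parent"):
        with sentry_sdk.new_trace():
            with sentry_sdk.start_span(name="detached work"):
                pass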
+def test_set_tags_converts_to_string(sentry_init, capture_events): + """Test that the api.set_tags function converts values to strings.""" + sentry_init() + events = capture_events() -def test_init_enter_deprecation(): - with pytest.warns(DeprecationWarning): - sentry_sdk.init().__enter__() + set_tags( + { + "int": 456, + "float": 789.012, + "bool": False, + "tuple": (1, 2, 3), + "string": "already_string", + } + ) + + raise_and_capture() + (*_, event) = events + tags = event.get("tags", {}) -def test_init_exit_deprecation(): - with pytest.warns(DeprecationWarning): - sentry_sdk.init().__exit__(None, None, None) + assert tags["int"] == "456" + assert tags["float"] == "789.012" + assert tags["bool"] == "False" + assert tags["tuple"] == "(1, 2, 3)" + assert tags["string"] == "already_string" diff --git a/tests/test_basics.py b/tests/test_basics.py index 2eeba78216..7872fb7e7b 100644 --- a/tests/test_basics.py +++ b/tests/test_basics.py @@ -1,29 +1,25 @@ -import datetime import importlib import logging import os import sys import time from collections import Counter +from datetime import datetime, timedelta, timezone import pytest -from sentry_sdk.client import Client -from sentry_sdk.utils import datetime_from_isoformat import sentry_sdk import sentry_sdk.scope from sentry_sdk import ( get_client, - push_scope, capture_event, capture_exception, capture_message, - start_transaction, + start_span, last_event_id, add_breadcrumb, isolation_scope, new_scope, - Hub, ) from sentry_sdk.integrations import ( _AUTO_ENABLING_INTEGRATIONS, @@ -35,8 +31,7 @@ from sentry_sdk.integrations.logging import LoggingIntegration from sentry_sdk.integrations.stdlib import StdlibIntegration from sentry_sdk.scope import add_global_event_processor -from sentry_sdk.utils import get_sdk_name, reraise -from sentry_sdk.tracing_utils import has_tracing_enabled +from sentry_sdk.utils import datetime_from_isoformat, get_sdk_name, reraise class NoOpIntegration(Integration): @@ -47,10 +42,10 @@ class NoOpIntegration(Integration): identifier = "noop" @staticmethod - def setup_once(): # type: () -> None + def setup_once() -> None: pass - def __eq__(self, __value): # type: (object) -> bool + def __eq__(self, __value: object) -> bool: """ All instances of NoOpIntegration should be considered equal to each other. 
""" @@ -177,7 +172,7 @@ def before_send_transaction(event, hint): traces_sample_rate=1.0, ) events = capture_events() - transaction = start_transaction(name="foo") + transaction = start_span(name="foo") transaction.finish() (event,) = events @@ -194,7 +189,7 @@ def before_send_transaction_discard(event, hint): traces_sample_rate=1.0, ) events = capture_events() - transaction = start_transaction(name="foo") + transaction = start_span(name="foo") transaction.finish() assert len(events) == 0 @@ -252,32 +247,6 @@ def do_this(): assert crumb["type"] == "default" -@pytest.mark.parametrize( - "enable_tracing, traces_sample_rate, tracing_enabled, updated_traces_sample_rate", - [ - (None, None, False, None), - (False, 0.0, False, 0.0), - (False, 1.0, False, 1.0), - (None, 1.0, True, 1.0), - (True, 1.0, True, 1.0), - (None, 0.0, True, 0.0), # We use this as - it's configured but turned off - (True, 0.0, True, 0.0), # We use this as - it's configured but turned off - (True, None, True, 1.0), - ], -) -def test_option_enable_tracing( - sentry_init, - enable_tracing, - traces_sample_rate, - tracing_enabled, - updated_traces_sample_rate, -): - sentry_init(enable_tracing=enable_tracing, traces_sample_rate=traces_sample_rate) - options = sentry_sdk.get_client().options - assert has_tracing_enabled(options) is tracing_enabled - assert options["traces_sample_rate"] == updated_traces_sample_rate - - def test_breadcrumb_arguments(sentry_init, capture_events): assert_hint = {"bar": 42} @@ -297,76 +266,6 @@ def before_breadcrumb(crumb, hint): add_breadcrumb(crumb=dict(foo=42)) -def test_push_scope(sentry_init, capture_events, suppress_deprecation_warnings): - sentry_init() - events = capture_events() - - with push_scope() as scope: - scope.level = "warning" - try: - 1 / 0 - except Exception as e: - capture_exception(e) - - (event,) = events - - assert event["level"] == "warning" - assert "exception" in event - - -def test_push_scope_null_client( - sentry_init, capture_events, suppress_deprecation_warnings -): - """ - This test can be removed when we remove push_scope and the Hub from the SDK. - """ - sentry_init() - events = capture_events() - - Hub.current.bind_client(None) - - with push_scope() as scope: - scope.level = "warning" - try: - 1 / 0 - except Exception as e: - capture_exception(e) - - assert len(events) == 0 - - -@pytest.mark.skip( - reason="This test is not valid anymore, because push_scope just returns the isolation scope. This test should be removed once the Hub is removed" -) -@pytest.mark.parametrize("null_client", (True, False)) -def test_push_scope_callback(sentry_init, null_client, capture_events): - """ - This test can be removed when we remove push_scope and the Hub from the SDK. 
- """ - sentry_init() - - if null_client: - Hub.current.bind_client(None) - - outer_scope = Hub.current.scope - - calls = [] - - @push_scope - def _(scope): - assert scope is Hub.current.scope - assert scope is not outer_scope - calls.append(1) - - # push_scope always needs to execute the callback regardless of - # client state, because that actually runs usercode in it, not - # just scope config code - assert calls == [1] - - # Assert scope gets popped correctly - assert Hub.current.scope is outer_scope - - def test_breadcrumbs(sentry_init, capture_events): sentry_init(max_breadcrumbs=10) events = capture_events() @@ -400,12 +299,12 @@ def test_breadcrumbs(sentry_init, capture_events): def test_breadcrumb_ordering(sentry_init, capture_events): sentry_init() events = capture_events() - now = datetime.datetime.now(datetime.timezone.utc).replace(microsecond=0) + now = datetime.now(timezone.utc).replace(microsecond=0) timestamps = [ - now - datetime.timedelta(days=10), - now - datetime.timedelta(days=8), - now - datetime.timedelta(days=12), + now - timedelta(days=10), + now - timedelta(days=8), + now - timedelta(days=12), ] for timestamp in timestamps: @@ -429,24 +328,24 @@ def test_breadcrumb_ordering(sentry_init, capture_events): def test_breadcrumb_ordering_different_types(sentry_init, capture_events): sentry_init() events = capture_events() - now = datetime.datetime.now(datetime.timezone.utc) + now = datetime.now(timezone.utc) timestamps = [ - now - datetime.timedelta(days=10), - now - datetime.timedelta(days=8), - now.replace(microsecond=0) - datetime.timedelta(days=12), - now - datetime.timedelta(days=9), - now - datetime.timedelta(days=13), - now.replace(microsecond=0) - datetime.timedelta(days=11), + now - timedelta(days=10), + now - timedelta(days=8), + now.replace(microsecond=0) - timedelta(days=12), + now - timedelta(days=9), + now - timedelta(days=13), + now.replace(microsecond=0) - timedelta(days=11), ] breadcrumb_timestamps = [ timestamps[0], timestamps[1].isoformat(), - datetime.datetime.strftime(timestamps[2], "%Y-%m-%dT%H:%M:%S") + "Z", - datetime.datetime.strftime(timestamps[3], "%Y-%m-%dT%H:%M:%S.%f") + "+00:00", - datetime.datetime.strftime(timestamps[4], "%Y-%m-%dT%H:%M:%S.%f") + "+0000", - datetime.datetime.strftime(timestamps[5], "%Y-%m-%dT%H:%M:%S.%f") + "-0000", + datetime.strftime(timestamps[2], "%Y-%m-%dT%H:%M:%S") + "Z", + datetime.strftime(timestamps[3], "%Y-%m-%dT%H:%M:%S.%f") + "+00:00", + datetime.strftime(timestamps[4], "%Y-%m-%dT%H:%M:%S.%f") + "+0000", + datetime.strftime(timestamps[5], "%Y-%m-%dT%H:%M:%S.%f") + "-0000", ] for i, timestamp in enumerate(timestamps): @@ -637,71 +536,6 @@ def test_integrations( } == expected_integrations -@pytest.mark.skip( - reason="This test is not valid anymore, because with the new Scopes calling bind_client on the Hub sets the client on the global scope. This test should be removed once the Hub is removed" -) -def test_client_initialized_within_scope(sentry_init, caplog): - """ - This test can be removed when we remove push_scope and the Hub from the SDK. - """ - caplog.set_level(logging.WARNING) - - sentry_init() - - with push_scope(): - Hub.current.bind_client(Client()) - - (record,) = (x for x in caplog.records if x.levelname == "WARNING") - - assert record.msg.startswith("init() called inside of pushed scope.") - - -@pytest.mark.skip( - reason="This test is not valid anymore, because with the new Scopes the push_scope just returns the isolation scope. 
This test should be removed once the Hub is removed" -) -def test_scope_leaks_cleaned_up(sentry_init, caplog): - """ - This test can be removed when we remove push_scope and the Hub from the SDK. - """ - caplog.set_level(logging.WARNING) - - sentry_init() - - old_stack = list(Hub.current._stack) - - with push_scope(): - push_scope() - - assert Hub.current._stack == old_stack - - (record,) = (x for x in caplog.records if x.levelname == "WARNING") - - assert record.message.startswith("Leaked 1 scopes:") - - -@pytest.mark.skip( - reason="This test is not valid anymore, because with the new Scopes there is not pushing and popping of scopes. This test should be removed once the Hub is removed" -) -def test_scope_popped_too_soon(sentry_init, caplog): - """ - This test can be removed when we remove push_scope and the Hub from the SDK. - """ - caplog.set_level(logging.ERROR) - - sentry_init() - - old_stack = list(Hub.current._stack) - - with push_scope(): - Hub.current.pop_scope_unsafe() - - assert Hub.current._stack == old_stack - - (record,) = (x for x in caplog.records if x.levelname == "ERROR") - - assert record.message == ("Scope popped too soon. Popped 1 scopes too many.") - - def test_scope_event_processor_order(sentry_init, capture_events): def before_send(event, hint): event["message"] += "baz" @@ -821,7 +655,7 @@ def foo(event, hint): capture_message("dropped") - with start_transaction(name="dropped"): + with start_span(name="dropped"): pass assert len(events) == 0 @@ -914,32 +748,36 @@ def _hello_world(word): def test_functions_to_trace(sentry_init, capture_events): - functions_to_trace = [ - {"qualified_name": "tests.test_basics._hello_world"}, - {"qualified_name": "time.sleep"}, - ] - - sentry_init( - traces_sample_rate=1.0, - functions_to_trace=functions_to_trace, - ) + original_sleep = time.sleep + try: + functions_to_trace = [ + {"qualified_name": "tests.test_basics._hello_world"}, + {"qualified_name": "time.sleep"}, + ] + + sentry_init( + traces_sample_rate=1.0, + functions_to_trace=functions_to_trace, + ) - events = capture_events() + events = capture_events() - with start_transaction(name="something"): - time.sleep(0) + with start_span(name="something"): + time.sleep(0) - for word in ["World", "You"]: - _hello_world(word) + for word in ["World", "You"]: + _hello_world(word) - assert len(events) == 1 + assert len(events) == 1 - (event,) = events + (event,) = events - assert len(event["spans"]) == 3 - assert event["spans"][0]["description"] == "time.sleep" - assert event["spans"][1]["description"] == "tests.test_basics._hello_world" - assert event["spans"][2]["description"] == "tests.test_basics._hello_world" + assert len(event["spans"]) == 3 + assert event["spans"][0]["description"] == "time.sleep" + assert event["spans"][1]["description"] == "tests.test_basics._hello_world" + assert event["spans"][2]["description"] == "tests.test_basics._hello_world" + finally: + time.sleep = original_sleep class WorldGreeter: @@ -962,7 +800,7 @@ def test_functions_to_trace_with_class(sentry_init, capture_events): events = capture_events() - with start_transaction(name="something"): + with start_span(name="something"): wg = WorldGreeter("World") wg.greet() wg.greet("You") @@ -1007,7 +845,7 @@ def test_staticmethod_class_tracing(sentry_init, capture_events): events = capture_events() - with sentry_sdk.start_transaction(name="test"): + with sentry_sdk.start_span(name="test"): assert TracingTestClass.static(1) == 1 (event,) = events @@ -1031,7 +869,7 @@ def 
test_staticmethod_instance_tracing(sentry_init, capture_events): events = capture_events() - with sentry_sdk.start_transaction(name="test"): + with sentry_sdk.start_span(name="test"): assert TracingTestClass().static(1) == 1 (event,) = events @@ -1055,7 +893,7 @@ def test_classmethod_class_tracing(sentry_init, capture_events): events = capture_events() - with sentry_sdk.start_transaction(name="test"): + with sentry_sdk.start_span(name="test"): assert TracingTestClass.class_(1) == (TracingTestClass, 1) (event,) = events @@ -1079,7 +917,7 @@ def test_classmethod_instance_tracing(sentry_init, capture_events): events = capture_events() - with sentry_sdk.start_transaction(name="test"): + with sentry_sdk.start_span(name="test"): assert TracingTestClass().class_(1) == (TracingTestClass, 1) (event,) = events @@ -1091,7 +929,7 @@ def test_classmethod_instance_tracing(sentry_init, capture_events): def test_last_event_id(sentry_init): - sentry_init(enable_tracing=True) + sentry_init(traces_sample_rate=1.0) assert last_event_id() is None @@ -1101,42 +939,24 @@ def test_last_event_id(sentry_init): def test_last_event_id_transaction(sentry_init): - sentry_init(enable_tracing=True) + sentry_init(traces_sample_rate=1.0) assert last_event_id() is None - with start_transaction(name="test"): + with start_span(name="test"): pass assert last_event_id() is None, "Transaction should not set last_event_id" def test_last_event_id_scope(sentry_init): - sentry_init(enable_tracing=True) + sentry_init(traces_sample_rate=1.0) # Should not crash with isolation_scope() as scope: assert scope.last_event_id() is None -def test_hub_constructor_deprecation_warning(): - with pytest.warns(sentry_sdk.hub.SentryHubDeprecationWarning): - Hub() - - -def test_hub_current_deprecation_warning(): - with pytest.warns(sentry_sdk.hub.SentryHubDeprecationWarning) as warning_records: - Hub.current - - # Make sure we only issue one deprecation warning - assert len(warning_records) == 1 - - -def test_hub_main_deprecation_warnings(): - with pytest.warns(sentry_sdk.hub.SentryHubDeprecationWarning): - Hub.main - - @pytest.mark.skipif(sys.version_info < (3, 11), reason="add_note() not supported") def test_notes(sentry_init, capture_events): sentry_init() diff --git a/tests/test_breadcrumbs.py b/tests/test_breadcrumbs.py new file mode 100644 index 0000000000..391c24cfc7 --- /dev/null +++ b/tests/test_breadcrumbs.py @@ -0,0 +1,86 @@ +from unittest import mock + +import sentry_sdk + + +def test_breadcrumbs(sentry_init, capture_events): + """ + This test illustrates how breadcrumbs are added to the error event when an error occurs + """ + sentry_init( + traces_sample_rate=1.0, + ) + events = capture_events() + + add_breadcrumbs_kwargs = { + "type": "navigation", + "category": "unit_tests.breadcrumbs", + "level": "fatal", + "origin": "unit-tests", + "data": { + "string": "foobar", + "number": 4.2, + "array": [1, 2, 3], + "dict": {"foo": "bar"}, + }, + } + + with sentry_sdk.start_span(name="trx-breadcrumbs"): + sentry_sdk.add_breadcrumb(message="breadcrumb0", **add_breadcrumbs_kwargs) + + with sentry_sdk.start_span(name="span1", op="function"): + sentry_sdk.add_breadcrumb(message="breadcrumb1", **add_breadcrumbs_kwargs) + + with sentry_sdk.start_span(name="span2", op="function"): + sentry_sdk.add_breadcrumb( + message="breadcrumb2", **add_breadcrumbs_kwargs + ) + + with sentry_sdk.start_span(name="span3", op="function"): + sentry_sdk.add_breadcrumb( + message="breadcrumb3", **add_breadcrumbs_kwargs + ) + + try: + 1 / 0 + except ZeroDivisionError as 
ex: + sentry_sdk.capture_exception(ex) + + assert len(events) == 2 + error = events[0] + + breadcrumbs = error["breadcrumbs"]["values"] + + for crumb in breadcrumbs: + print(crumb) + + assert len(breadcrumbs) == 4 + + # Check for my custom breadcrumbs + for i in range(0, 3): + assert breadcrumbs[i]["message"] == f"breadcrumb{i}" + assert breadcrumbs[i]["type"] == "navigation" + assert breadcrumbs[i]["category"] == "unit_tests.breadcrumbs" + assert breadcrumbs[i]["level"] == "fatal" + assert breadcrumbs[i]["origin"] == "unit-tests" + assert breadcrumbs[i]["data"] == { + "string": "foobar", + "number": 4.2, + "array": [1, 2, 3], + "dict": {"foo": "bar"}, + } + assert breadcrumbs[i]["timestamp"] == mock.ANY + + # Check for custom breadcrumbs on span3 + assert breadcrumbs[3]["message"] == "breadcrumb3" + assert breadcrumbs[3]["type"] == "navigation" + assert breadcrumbs[3]["category"] == "unit_tests.breadcrumbs" + assert breadcrumbs[3]["level"] == "fatal" + assert breadcrumbs[3]["origin"] == "unit-tests" + assert breadcrumbs[3]["data"] == { + "string": "foobar", + "number": 4.2, + "array": [1, 2, 3], + "dict": {"foo": "bar"}, + } + assert breadcrumbs[3]["timestamp"] == mock.ANY diff --git a/tests/test_client.py b/tests/test_client.py index 0468fcbb7b..b69a6a0f3f 100644 --- a/tests/test_client.py +++ b/tests/test_client.py @@ -5,18 +5,16 @@ import sys import time from collections import Counter, defaultdict -from collections.abc import Mapping from textwrap import dedent from unittest import mock +from typing import Optional, Union, Mapping, Callable import pytest import sentry_sdk from sentry_sdk import ( - Hub, Client, add_breadcrumb, - configure_scope, capture_message, capture_exception, capture_event, @@ -28,13 +26,7 @@ from sentry_sdk.transport import Transport from sentry_sdk.serializer import MAX_DATABAG_BREADTH from sentry_sdk.consts import DEFAULT_MAX_BREADCRUMBS, DEFAULT_MAX_VALUE_LENGTH - -from typing import TYPE_CHECKING - -if TYPE_CHECKING: - from collections.abc import Callable - from typing import Any, Optional, Union - from sentry_sdk._types import Event +from sentry_sdk.types import Event maximum_python_312 = pytest.mark.skipif( @@ -381,13 +373,6 @@ def test_socks_proxy(testcase, http2): ) -def test_simple_transport(sentry_init): - events = [] - sentry_init(transport=events.append) - capture_message("Hello World!") - assert events[0]["message"] == "Hello World!" - - def test_ignore_errors(sentry_init, capture_events): sentry_init(ignore_errors=[ZeroDivisionError]) events = capture_events() @@ -605,39 +590,12 @@ def capture_envelope(self, envelope): ) start = time.time() - output = subprocess.check_output([sys.executable, str(app)]) + subprocess.check_output([sys.executable, str(app)]) end = time.time() # Each message takes at least 0.1 seconds to process assert int(end - start) >= num_messages / 10 - assert output.count(b"HI") == num_messages - - -def test_configure_scope_available( - sentry_init, request, monkeypatch, suppress_deprecation_warnings -): - """ - Test that scope is configured if client is configured - - This test can be removed once configure_scope and the Hub are removed. 
- """ - sentry_init() - - with configure_scope() as scope: - assert scope is Hub.current.scope - scope.set_tag("foo", "bar") - - calls = [] - - def callback(scope): - calls.append(scope) - scope.set_tag("foo", "bar") - - assert configure_scope(callback) is None - assert len(calls) == 1 - assert calls[0] is Hub.current.scope - @pytest.mark.tests_internal_exceptions def test_client_debug_option_enabled(sentry_init, caplog): @@ -657,27 +615,6 @@ def test_client_debug_option_disabled(with_client, sentry_init, caplog): assert "OK" not in caplog.text -@pytest.mark.skip( - reason="New behavior in SDK 2.0: You have a scope before init and add data to it." -) -def test_scope_initialized_before_client(sentry_init, capture_events): - """ - This is a consequence of how configure_scope() works. We must - make `configure_scope()` a noop if no client is configured. Even - if the user later configures a client: We don't know that. - """ - with configure_scope() as scope: - scope.set_tag("foo", 42) - - sentry_init() - - events = capture_events() - capture_message("hi") - (event,) = events - - assert "tags" not in event - - def test_weird_chars(sentry_init, capture_events): sentry_init() events = capture_events() @@ -1192,12 +1129,11 @@ def test_spotlight_option( class IssuesSamplerTestConfig: def __init__( self, - expected_events, - sampler_function=None, - sample_rate=None, - exception_to_raise=Exception, - ): - # type: (int, Optional[Callable[[Event], Union[float, bool]]], Optional[float], type[Exception]) -> None + expected_events: int, + sampler_function: Optional[Callable[[Event], Union[float, bool]]] = None, + sample_rate: Optional[float] = None, + exception_to_raise: type = Exception, + ) -> None: self.sampler_function_mock = ( None if sampler_function is None @@ -1207,14 +1143,12 @@ def __init__( self.sample_rate = sample_rate self.exception_to_raise = exception_to_raise - def init_sdk(self, sentry_init): - # type: (Callable[[*Any], None]) -> None + def init_sdk(self, sentry_init: Callable[..., None]) -> None: sentry_init( error_sampler=self.sampler_function_mock, sample_rate=self.sample_rate ) - def raise_exception(self): - # type: () -> None + def raise_exception(self) -> None: raise self.exception_to_raise() @@ -1497,12 +1431,6 @@ def test_dropped_transaction(sentry_init, capture_record_lost_event_calls, test_ test_config.run(sentry_init, capture_record_lost_event_calls) -@pytest.mark.parametrize("enable_tracing", [True, False]) -def test_enable_tracing_deprecated(sentry_init, enable_tracing): - with pytest.warns(DeprecationWarning): - sentry_init(enable_tracing=enable_tracing) - - def make_options_transport_cls(): """Make an options transport class that captures the options passed to it.""" # We need a unique class for each test so that the options are not diff --git a/tests/test_dsc.py b/tests/test_dsc.py index 8e549d0cf8..ea3c0b8988 100644 --- a/tests/test_dsc.py +++ b/tests/test_dsc.py @@ -29,8 +29,8 @@ def test_dsc_head_of_trace(sentry_init, capture_envelopes): ) envelopes = capture_envelopes() - # We start a new transaction - with sentry_sdk.start_transaction(name="foo"): + # We start a new root_span + with sentry_sdk.start_span(name="foo"): pass assert len(envelopes) == 1 @@ -97,10 +97,10 @@ def test_dsc_continuation_of_trace(sentry_init, capture_envelopes): "HTTP_BAGGAGE": baggage, } - # We continue the incoming trace and start a new transaction - transaction = sentry_sdk.continue_trace(incoming_http_headers) - with sentry_sdk.start_transaction(transaction, name="foo"): - pass + # We 
continue the incoming trace and start a new root span + with sentry_sdk.continue_trace(incoming_http_headers): + with sentry_sdk.start_span(name="foo"): + pass assert len(envelopes) == 1 @@ -117,7 +117,7 @@ def test_dsc_continuation_of_trace(sentry_init, capture_envelopes): assert "sample_rate" in envelope_trace_header assert type(envelope_trace_header["sample_rate"]) == str - assert envelope_trace_header["sample_rate"] == "1.0" + assert envelope_trace_header["sample_rate"] == "0.01337" assert "sampled" in envelope_trace_header assert type(envelope_trace_header["sampled"]) == str @@ -137,7 +137,7 @@ def test_dsc_continuation_of_trace(sentry_init, capture_envelopes): def test_dsc_continuation_of_trace_sample_rate_changed_in_traces_sampler( - sentry_init, capture_envelopes + sentry_init, capture_envelopes, monkeypatch ): """ Another service calls our service and passes tracing information to us. @@ -176,9 +176,9 @@ def my_traces_sampler(sampling_context): # We continue the incoming trace and start a new transaction with mock.patch("sentry_sdk.tracing_utils.Random.uniform", return_value=0.125): - transaction = sentry_sdk.continue_trace(incoming_http_headers) - with sentry_sdk.start_transaction(transaction, name="foo"): - pass + with sentry_sdk.continue_trace(incoming_http_headers): + with sentry_sdk.start_span(name="foo"): + pass assert len(envelopes) == 1 @@ -214,6 +214,213 @@ def my_traces_sampler(sampling_context): assert envelope_trace_header["transaction"] == "bar" +@pytest.mark.parametrize( + "test_data, expected_sample_rate, expected_sampled", + [ + # Test data: + # "incoming_sample_rate": + # The "sentry-sample_rate" in the incoming `baggage` header. + # "incoming_sampled": + # The "sentry-sampled" in the incoming `baggage` header. + # "sentry_trace_header_parent_sampled": + # The number at the end in the `sentry-trace` header, called "parent_sampled". + # "use_local_traces_sampler": + # Whether the local traces sampler is used. + # "local_traces_sampler_result": + # The result of the local traces sampler. + # "local_traces_sample_rate": + # The `traces_sample_rate` setting in the local `sentry_init` call. 
+ ( # 1 traces_sample_rate does not override incoming + { + "incoming_sample_rate": 1.0, + "incoming_sampled": "true", + "sentry_trace_header_parent_sampled": 1, + "use_local_traces_sampler": False, + "local_traces_sampler_result": None, + "local_traces_sample_rate": 0.7, + }, + 1.0, # expected_sample_rate + "true", # expected_sampled + ), + ( # 2 traces_sampler overrides incoming + { + "incoming_sample_rate": 1.0, + "incoming_sampled": "true", + "sentry_trace_header_parent_sampled": 1, + "use_local_traces_sampler": True, + "local_traces_sampler_result": 0.5, + "local_traces_sample_rate": 0.7, + }, + 0.5, # expected_sample_rate + "true", # expected_sampled + ), + ( # 3 traces_sample_rate does not override incoming sample rate or parent (incoming not sampled) + { + "incoming_sample_rate": 1.0, + "incoming_sampled": "false", + "sentry_trace_header_parent_sampled": 0, + "use_local_traces_sampler": False, + "local_traces_sampler_result": None, + "local_traces_sample_rate": 0.7, + }, + None, # expected_sample_rate + "tracing-disabled-no-transactions-should-be-sent", # expected_sampled (because the parent sampled is 0) + ), + ( # 4 traces_sampler overrides incoming (incoming not sampled) + { + "incoming_sample_rate": 0.3, + "incoming_sampled": "false", + "sentry_trace_header_parent_sampled": 0, + "use_local_traces_sampler": True, + "local_traces_sampler_result": 0.25, + "local_traces_sample_rate": 0.7, + }, + 0.25, # expected_sample_rate + "false", # expected_sampled (traces sampler can override parent sampled) + ), + ( # 5 forwarding incoming (traces_sample_rate not set) + { + "incoming_sample_rate": 1.0, + "incoming_sampled": "true", + "sentry_trace_header_parent_sampled": 1, + "use_local_traces_sampler": False, + "local_traces_sampler_result": None, + "local_traces_sample_rate": None, + }, + None, # expected_sample_rate + "tracing-disabled-no-transactions-should-be-sent", # expected_sampled (traces_sample_rate=None disables all transaction creation) + ), + ( # 6 traces_sampler overrides incoming (traces_sample_rate not set) + { + "incoming_sample_rate": 1.0, + "incoming_sampled": "true", + "sentry_trace_header_parent_sampled": 1, + "use_local_traces_sampler": True, + "local_traces_sampler_result": 0.5, + "local_traces_sample_rate": None, + }, + 0.5, # expected_sample_rate + "true", # expected_sampled (traces sampler overrides the traces_sample_rate setting, so transactions are created) + ), + ( # 7 forwarding incoming (traces_sample_rate not set) (incoming not sampled) + { + "incoming_sample_rate": 1.0, + "incoming_sampled": "false", + "sentry_trace_header_parent_sampled": 0, + "use_local_traces_sampler": False, + "local_traces_sampler_result": None, + "local_traces_sample_rate": None, + }, + None, # expected_sample_rate + "tracing-disabled-no-transactions-should-be-sent", # expected_sampled (traces_sample_rate=None disables all transaction creation) + ), + ( # 8 traces_sampler overrides incoming (traces_sample_rate not set) (incoming not sampled) + { + "incoming_sample_rate": 0.3, + "incoming_sampled": "false", + "sentry_trace_header_parent_sampled": 0, + "use_local_traces_sampler": True, + "local_traces_sampler_result": 0.25, + "local_traces_sample_rate": None, + }, + 0.25, # expected_sample_rate + "false", # expected_sampled + ), + ( # 9 traces_sample_rate overrides incoming (upstream deferred sampling decision) + { + "incoming_sample_rate": 1.0, + "incoming_sampled": None, + "sentry_trace_header_parent_sampled": None, + "use_local_traces_sampler": False, + "local_traces_sampler_result":
0.5, + "local_traces_sample_rate": 0.7, + }, + 0.7, # expected_sample_rate + "true", # expected_sampled + ), + ], + ids=( + "1 traces_sample_rate does not override incoming", + "2 traces_sampler overrides incoming", + "3 traces_sample_rate does not override incoming sample rate or parent (incoming not sampled)", + "4 traces_sampler overrides incoming (incoming not sampled)", + "5 forwarding incoming (traces_sample_rate not set)", + "6 traces_sampler overrides incoming (traces_sample_rate not set)", + "7 forwarding incoming (traces_sample_rate not set) (incoming not sampled)", + "8 traces_sampler overrides incoming (traces_sample_rate not set) (incoming not sampled)", + "9 traces_sample_rate overrides incoming (upstream deferred sampling decision)", + ), +) +def test_dsc_sample_rate_change( + sentry_init, + capture_envelopes, + test_data, + expected_sample_rate, + expected_sampled, +): + """ + Another service calls our service and passes tracing information to us. + Our service continues the trace but modifies the sample rate. + The DSC in transaction envelopes should contain the updated sample rate. + """ + + def my_traces_sampler(sampling_context): + return test_data["local_traces_sampler_result"] + + init_kwargs = { + "dsn": "https://mysecret@bla.ingest.sentry.io/12312012", + "release": "myapp@0.0.1", + "environment": "canary", + } + + if test_data["local_traces_sample_rate"]: + init_kwargs["traces_sample_rate"] = test_data["local_traces_sample_rate"] + + if test_data["use_local_traces_sampler"]: + init_kwargs["traces_sampler"] = my_traces_sampler + + sentry_init(**init_kwargs) + envelopes = capture_envelopes() + + # This is what the upstream service sends us + incoming_trace_id = "771a43a4192642f0b136d5159a501700" + if test_data["sentry_trace_header_parent_sampled"] is None: + sentry_trace = f"{incoming_trace_id}-1234567890abcdef" + else: + sentry_trace = f"{incoming_trace_id}-1234567890abcdef-{test_data['sentry_trace_header_parent_sampled']}" + + baggage = ( + f"sentry-trace_id={incoming_trace_id}, " + f"sentry-sample_rate={str(test_data['incoming_sample_rate'])}, " + f"sentry-sampled={test_data['incoming_sampled']}, " + "sentry-public_key=frontendpublickey, " + "sentry-release=myapp@0.0.1, " + "sentry-environment=prod, " + "sentry-transaction=foo, " + ) + incoming_http_headers = { + "HTTP_SENTRY_TRACE": sentry_trace, + "HTTP_BAGGAGE": baggage, + } + + # We continue the incoming trace and start a new root span + with mock.patch("sentry_sdk.tracing_utils.Random.uniform", return_value=0.2): + with sentry_sdk.continue_trace(incoming_http_headers): + with sentry_sdk.start_span(name="foo"): + pass + + if expected_sampled == "tracing-disabled-no-transactions-should-be-sent": + assert len(envelopes) == 0 + else: + assert len(envelopes) == 1 + transaction_envelope = envelopes[0] + dsc_in_envelope_header = transaction_envelope.headers["trace"] + + assert dsc_in_envelope_header["sample_rate"] == str(expected_sample_rate) + assert dsc_in_envelope_header["sampled"] == str(expected_sampled).lower() + assert dsc_in_envelope_header["trace_id"] == incoming_trace_id + + def test_dsc_issue(sentry_init, capture_envelopes): """ Our service is a standalone service that does not have tracing enabled. Just uses Sentry for error reporting.
@@ -225,7 +432,7 @@ def test_dsc_issue(sentry_init, capture_envelopes): ) envelopes = capture_envelopes() - # No transaction is started, just an error is captured + # No root span is started, just an error is captured try: 1 / 0 except ZeroDivisionError as exp: @@ -261,8 +468,8 @@ def test_dsc_issue(sentry_init, capture_envelopes): def test_dsc_issue_with_tracing(sentry_init, capture_envelopes): """ - Our service has tracing enabled and an error occurs in an transaction. - Envelopes containing errors also have the same DSC than the transaction envelopes. + Our service has tracing enabled and an error occurs in a root span. + Envelopes containing errors also have the same DSC as the root span envelopes. """ sentry_init( dsn="https://mysecret@bla.ingest.sentry.io/12312012", @@ -272,8 +479,8 @@ def test_dsc_issue_with_tracing(sentry_init, capture_envelopes): ) envelopes = capture_envelopes() - # We start a new transaction and an error occurs - with sentry_sdk.start_transaction(name="foo"): + # We start a new root span and an error occurs + with sentry_sdk.start_span(name="foo"): try: 1 / 0 except ZeroDivisionError as exp: @@ -319,7 +526,7 @@ def test_dsc_issue_with_tracing(sentry_init, capture_envelopes): "traces_sample_rate", [ 0, # no traces will be started, but if incoming traces will be continued (by our instrumentations, not happening in this test) - None, # no tracing at all. This service will never create transactions. + None, # no tracing at all. This service will never create root spans. ], ) def test_dsc_issue_twp(sentry_init, capture_envelopes, traces_sample_rate): @@ -358,14 +565,14 @@ def test_dsc_issue_twp(sentry_init, capture_envelopes, traces_sample_rate): } # We continue the trace (meaning: saving the incoming trace information on the scope) - # but in this test, we do not start a transaction. - sentry_sdk.continue_trace(incoming_http_headers) + # but in this test, we do not start a root span.
+ with sentry_sdk.continue_trace(incoming_http_headers): - # No transaction is started, just an error is captured - try: - 1 / 0 - except ZeroDivisionError as exp: - sentry_sdk.capture_exception(exp) + # No root span is started, just an error is captured + try: + 1 / 0 + except ZeroDivisionError as exp: + sentry_sdk.capture_exception(exp) assert len(envelopes) == 1 diff --git a/tests/test_exceptiongroup.py b/tests/test_exceptiongroup.py index 4c7afc58eb..b4512c9a79 100644 --- a/tests/test_exceptiongroup.py +++ b/tests/test_exceptiongroup.py @@ -1,4 +1,5 @@ import sys +from unittest import mock import pytest from sentry_sdk.utils import event_from_exception @@ -217,7 +218,10 @@ def test_exception_chain_cause(): { "mechanism": { "handled": False, - "type": "test_suite", + "type": "chained", + "exception_id": 1, + "parent_id": 0, + "source": "__cause__", }, "module": None, "type": "TypeError", @@ -227,6 +231,7 @@ def test_exception_chain_cause(): "mechanism": { "handled": False, "type": "test_suite", + "exception_id": 0, }, "module": None, "type": "ValueError", @@ -257,7 +262,10 @@ def test_exception_chain_context(): { "mechanism": { "handled": False, - "type": "test_suite", + "type": "chained", + "exception_id": 1, + "parent_id": 0, + "source": "__context__", }, "module": None, "type": "TypeError", @@ -267,6 +275,7 @@ def test_exception_chain_context(): "mechanism": { "handled": False, "type": "test_suite", + "exception_id": 0, }, "module": None, "type": "ValueError", @@ -297,6 +306,7 @@ def test_simple_exception(): "mechanism": { "handled": False, "type": "test_suite", + "exception_id": 0, }, "module": None, "type": "ValueError", @@ -306,3 +316,240 @@ def test_simple_exception(): exception_values = event["exception"]["values"] assert exception_values == expected_exception_values + + +@minimum_python_311 +def test_exceptiongroup_recursion(): + exception_group = None + + my_error = RuntimeError("my error") + try: + try: + raise my_error + except RuntimeError: + raise ExceptionGroup( + "my_group", + [my_error], + ) + except ExceptionGroup as e: + exception_group = e + + (event, _) = event_from_exception( + exception_group, + client_options={ + "include_local_variables": True, + "include_source_context": True, + "max_value_length": 1024, + }, + mechanism={"type": "test_suite", "handled": False}, + ) + + values = event["exception"]["values"] + + # For this test the stacktrace and the module is not important + for x in values: + if "stacktrace" in x: + del x["stacktrace"] + if "module" in x: + del x["module"] + + # One ExceptionGroup, + # then the RuntimeError in the ExceptionGroup, + # and the original RuntimeError that was raised. 
+ assert len(values) == 3 + + expected_values = [ + { + "mechanism": { + "exception_id": 2, + "handled": False, + "parent_id": 0, + "source": "exceptions[0]", + "type": "chained", + }, + "type": "RuntimeError", + "value": "my error", + }, + { + "mechanism": { + "exception_id": 1, + "handled": False, + "parent_id": 0, + "source": "__context__", + "type": "chained", + }, + "type": "RuntimeError", + "value": "my error", + }, + { + "mechanism": { + "exception_id": 0, + "handled": False, + "is_exception_group": True, + "type": "test_suite", + }, + "type": "ExceptionGroup", + "value": "my_group", + }, + ] + + assert values == expected_values + + +@minimum_python_311 +def test_exceptiongroup_recursion_multiple_levels(): + error = None + + my_error = RuntimeError("my error") + my_error_2 = RuntimeError("my error 2") + try: + try: + raise my_error + except RuntimeError: + try: + raise ExceptionGroup( + "my_group", + [my_error_2], + ) + except ExceptionGroup: + raise my_error + + except RuntimeError as e: + error = e + + (event, _) = event_from_exception( + error, + client_options={ + "include_local_variables": True, + "include_source_context": True, + "max_value_length": 1024, + }, + mechanism={"type": "test_suite", "handled": False}, + ) + + values = event["exception"]["values"] + + # For this test the stacktrace and the module is not important + for x in values: + if "stacktrace" in x: + del x["stacktrace"] + if "module" in x: + del x["module"] + + # One ExceptionGroup, + # then the RuntimeError in the ExceptionGroup, + # and the original RuntimeError that was raised. + assert len(values) == 3 + + expected_values = [ + { + "mechanism": { + "type": "chained", + "handled": False, + "exception_id": 2, + "source": "exceptions[0]", + "parent_id": 1, + }, + "type": "RuntimeError", + "value": "my error 2", + }, + { + "mechanism": { + "type": "chained", + "handled": False, + "exception_id": 1, + "source": "__context__", + "parent_id": 0, + "is_exception_group": True, + }, + "type": "ExceptionGroup", + "value": "my_group", + }, + { + "mechanism": { + "type": "test_suite", + "handled": False, + "exception_id": 0, + }, + "type": "RuntimeError", + "value": "my error", + }, + ] + + assert values == expected_values + + +@minimum_python_311 +def test_too_many_exceptions(): + with mock.patch("sentry_sdk.utils.MAX_EXCEPTIONS", 3): + error = None + try: + try: + raise RuntimeError("my error 1") + except RuntimeError: + try: + raise RuntimeError("my error 2") + except RuntimeError: + try: + raise RuntimeError("my error 3") + except RuntimeError: + raise RuntimeError("my error 4") + except RuntimeError as e: + error = e + + (event, _) = event_from_exception( + error, + client_options={ + "include_local_variables": True, + "include_source_context": True, + "max_value_length": 1024, + }, + mechanism={"type": "test_suite", "handled": False}, + ) + + values = event["exception"]["values"] + + # For this test the stacktrace and the module is not important + for x in values: + if "stacktrace" in x: + del x["stacktrace"] + if "module" in x: + del x["module"] + + assert len(values) == 3 + + expected_values = [ + { + "mechanism": { + "type": "chained", + "handled": False, + "exception_id": 2, + "source": "__context__", + "parent_id": 1, + }, + "type": "RuntimeError", + "value": "my error 2", + }, + { + "mechanism": { + "type": "chained", + "handled": False, + "exception_id": 1, + "source": "__context__", + "parent_id": 0, + }, + "type": "RuntimeError", + "value": "my error 3", + }, + { + "mechanism": { + "type": "test_suite", 
+ "handled": False, + "exception_id": 0, + }, + "type": "RuntimeError", + "value": "my error 4", + }, + ] + + assert values == expected_values diff --git a/tests/test_feature_flags.py b/tests/test_feature_flags.py index e0ab1e254e..43b611b89b 100644 --- a/tests/test_feature_flags.py +++ b/tests/test_feature_flags.py @@ -316,3 +316,19 @@ def test_flag_limit(sentry_init, capture_events): } ) assert "flag.evaluation.10" not in event["spans"][0]["data"] + + +def test_flag_counter_not_sent(sentry_init, capture_events): + sentry_init(traces_sample_rate=1.0) + + events = capture_events() + + with start_transaction(name="hi"): + with start_span(op="foo", name="bar"): + add_feature_flag("0", True) + add_feature_flag("1", True) + add_feature_flag("2", True) + add_feature_flag("3", True) + + (event,) = events + assert "_flag.count" not in event["spans"][0]["data"] diff --git a/tests/test_gevent.py b/tests/test_gevent.py index d330760adf..74cb650e9a 100644 --- a/tests/test_gevent.py +++ b/tests/test_gevent.py @@ -3,7 +3,7 @@ from datetime import datetime, timezone import sentry_sdk -from sentry_sdk._compat import PY37, PY38 +from sentry_sdk._compat import PY38 import pytest from tests.conftest import CapturingServer @@ -50,7 +50,7 @@ def inner(**kwargs): @pytest.mark.parametrize("compression_level", (0, 9, None)) @pytest.mark.parametrize( "compression_algo", - (("gzip", "br", "", None) if PY37 else ("gzip", "", None)), + ("gzip", "br", "", None), ) @pytest.mark.parametrize("http2", [True, False] if PY38 else [False]) def test_transport_works_gevent( diff --git a/tests/test_logs.py b/tests/test_logs.py index b2578d83d5..22c9d19bd9 100644 --- a/tests/test_logs.py +++ b/tests/test_logs.py @@ -17,10 +17,8 @@ ) -def otel_attributes_to_dict(otel_attrs): - # type: (Mapping[str, Any]) -> Mapping[str, Any] - def _convert_attr(attr): - # type: (Mapping[str, Union[str, float, bool]]) -> Any +def otel_attributes_to_dict(otel_attrs: Mapping[str, Any]) -> Mapping[str, Any]: + def _convert_attr(attr: Mapping[str, Union[str, float, bool]]) -> Any: if attr["type"] == "boolean": return attr["value"] if attr["type"] == "double": @@ -38,7 +36,7 @@ def _convert_attr(attr): def envelopes_to_logs(envelopes: List[Envelope]) -> List[Log]: - res = [] # type: List[Log] + res: List[Log] = [] for envelope in envelopes: for item in envelope.items: if item.type == "log": @@ -54,7 +52,7 @@ def envelopes_to_logs(envelopes: List[Envelope]) -> List[Log]: "attributes": otel_attributes_to_dict(log_json["attributes"]), "time_unix_nano": int(float(log_json["timestamp"]) * 1e9), "trace_id": log_json["trace_id"], - } # type: Log + } res.append(log) return res @@ -111,22 +109,6 @@ def test_logs_basics(sentry_init, capture_envelopes): assert logs[5].get("severity_number") == 21 -@minimum_python_37 -def test_logs_experimental_option_still_works(sentry_init, capture_envelopes): - sentry_init(_experiments={"enable_logs": True}) - envelopes = capture_envelopes() - - sentry_sdk.logger.error("This is an error log...") - - get_client().flush() - - logs = envelopes_to_logs(envelopes) - assert len(logs) == 1 - - assert logs[0].get("severity_text") == "error" - assert logs[0].get("severity_number") == 17 - - @minimum_python_37 def test_logs_before_send_log(sentry_init, capture_envelopes): before_log_called = False @@ -174,36 +156,6 @@ def _before_log(record, hint): assert before_log_called is True -@minimum_python_37 -def test_logs_before_send_log_experimental_option_still_works( - sentry_init, capture_envelopes -): - before_log_called = False - - def 
_before_log(record, hint): - nonlocal before_log_called - before_log_called = True - - return record - - sentry_init( - enable_logs=True, - _experiments={ - "before_send_log": _before_log, - }, - ) - envelopes = capture_envelopes() - - sentry_sdk.logger.error("This is an error log...") - - get_client().flush() - logs = envelopes_to_logs(envelopes) - assert len(logs) == 1 - - assert logs[0]["severity_text"] == "error" - assert before_log_called is True - - @minimum_python_37 def test_logs_attributes(sentry_init, capture_envelopes): """ @@ -281,15 +233,15 @@ def test_logs_message_params(sentry_init, capture_envelopes): @minimum_python_37 -def test_logs_tied_to_transactions(sentry_init, capture_envelopes): +def test_logs_tied_to_root_spans(sentry_init, capture_envelopes): """ - Log messages are also tied to transactions. + Log messages are also tied to root spans. """ sentry_init(enable_logs=True) envelopes = capture_envelopes() - with sentry_sdk.start_transaction(name="test-transaction") as trx: - sentry_sdk.logger.warning("This is a log tied to a transaction") + with sentry_sdk.start_span(name="test-root-span") as trx: + sentry_sdk.logger.warning("This is a log tied to a root span") get_client().flush() logs = envelopes_to_logs(envelopes) @@ -304,9 +256,9 @@ def test_logs_tied_to_spans(sentry_init, capture_envelopes): sentry_init(enable_logs=True) envelopes = capture_envelopes() - with sentry_sdk.start_transaction(name="test-transaction"): + with sentry_sdk.start_span(name="test-root-span"): with sentry_sdk.start_span(name="test-span") as span: - sentry_sdk.logger.warning("This is a log tied to a span") + sentry_sdk.logger.warning("This is a log tied to a child span") get_client().flush() logs = envelopes_to_logs(envelopes) diff --git a/tests/test_metrics.py b/tests/test_metrics.py deleted file mode 100644 index c02f075288..0000000000 --- a/tests/test_metrics.py +++ /dev/null @@ -1,971 +0,0 @@ -import sys -import time -import linecache -from unittest import mock - -import pytest - -import sentry_sdk -from sentry_sdk import metrics -from sentry_sdk.tracing import TransactionSource -from sentry_sdk.envelope import parse_json - -try: - import gevent -except ImportError: - gevent = None - - -minimum_python_37_with_gevent = pytest.mark.skipif( - gevent and sys.version_info < (3, 7), - reason="Require Python 3.7 or higher with gevent", -) - - -def parse_metrics(bytes): - rv = [] - for line in bytes.splitlines(): - pieces = line.decode("utf-8").split("|") - payload = pieces[0].split(":") - name = payload[0] - values = payload[1:] - ty = pieces[1] - ts = None - tags = {} - for piece in pieces[2:]: - if piece[0] == "#": - for pair in piece[1:].split(","): - k, v = pair.split(":", 1) - old = tags.get(k) - if old is not None: - if isinstance(old, list): - old.append(v) - else: - tags[k] = [old, v] - else: - tags[k] = v - elif piece[0] == "T": - ts = int(piece[1:]) - else: - raise ValueError("unknown piece %r" % (piece,)) - rv.append((ts, name, ty, values, tags)) - rv.sort(key=lambda x: (x[0], x[1], tuple(sorted(tags.items())))) - return rv - - -@minimum_python_37_with_gevent -@pytest.mark.forked -def test_increment(sentry_init, capture_envelopes, maybe_monkeypatched_threading): - sentry_init( - release="fun-release", - environment="not-fun-env", - _experiments={"enable_metrics": True, "metric_code_locations": True}, - ) - ts = time.time() - envelopes = capture_envelopes() - - metrics.increment("foobar", 1.0, tags={"foo": "bar", "blub": "blah"}, timestamp=ts) - # python specific alias - 
metrics.incr("foobar", 2.0, tags={"foo": "bar", "blub": "blah"}, timestamp=ts) - sentry_sdk.flush() - - (envelope,) = envelopes - statsd_item, meta_item = envelope.items - - assert statsd_item.headers["type"] == "statsd" - m = parse_metrics(statsd_item.payload.get_bytes()) - - assert len(m) == 1 - assert m[0][1] == "foobar@none" - assert m[0][2] == "c" - assert m[0][3] == ["3.0"] - assert m[0][4] == { - "blub": "blah", - "foo": "bar", - "release": "fun-release", - "environment": "not-fun-env", - } - - assert meta_item.headers["type"] == "metric_meta" - assert parse_json(meta_item.payload.get_bytes()) == { - "timestamp": mock.ANY, - "mapping": { - "c:foobar@none": [ - { - "type": "location", - "filename": "tests/test_metrics.py", - "abs_path": __file__, - "function": sys._getframe().f_code.co_name, - "module": __name__, - "lineno": mock.ANY, - "pre_context": mock.ANY, - "context_line": mock.ANY, - "post_context": mock.ANY, - } - ] - }, - } - - -@minimum_python_37_with_gevent -@pytest.mark.forked -def test_timing(sentry_init, capture_envelopes, maybe_monkeypatched_threading): - sentry_init( - release="fun-release@1.0.0", - environment="not-fun-env", - _experiments={"enable_metrics": True, "metric_code_locations": True}, - ) - ts = time.time() - envelopes = capture_envelopes() - - with metrics.timing("whatever", tags={"blub": "blah"}, timestamp=ts): - time.sleep(0.1) - sentry_sdk.flush() - - (envelope,) = envelopes - statsd_item, meta_item = envelope.items - - assert statsd_item.headers["type"] == "statsd" - m = parse_metrics(statsd_item.payload.get_bytes()) - - assert len(m) == 1 - assert m[0][1] == "whatever@second" - assert m[0][2] == "d" - assert len(m[0][3]) == 1 - assert float(m[0][3][0]) >= 0.1 - assert m[0][4] == { - "blub": "blah", - "release": "fun-release@1.0.0", - "environment": "not-fun-env", - } - - assert meta_item.headers["type"] == "metric_meta" - json = parse_json(meta_item.payload.get_bytes()) - assert json == { - "timestamp": mock.ANY, - "mapping": { - "d:whatever@second": [ - { - "type": "location", - "filename": "tests/test_metrics.py", - "abs_path": __file__, - "function": sys._getframe().f_code.co_name, - "module": __name__, - "lineno": mock.ANY, - "pre_context": mock.ANY, - "context_line": mock.ANY, - "post_context": mock.ANY, - } - ] - }, - } - - loc = json["mapping"]["d:whatever@second"][0] - line = linecache.getline(loc["abs_path"], loc["lineno"]) - assert ( - line.strip() - == 'with metrics.timing("whatever", tags={"blub": "blah"}, timestamp=ts):' - ) - - -@minimum_python_37_with_gevent -@pytest.mark.forked -def test_timing_decorator( - sentry_init, capture_envelopes, maybe_monkeypatched_threading -): - sentry_init( - release="fun-release@1.0.0", - environment="not-fun-env", - _experiments={"enable_metrics": True, "metric_code_locations": True}, - ) - envelopes = capture_envelopes() - - @metrics.timing("whatever-1", tags={"x": "y"}) - def amazing(): - time.sleep(0.1) - return 42 - - @metrics.timing("whatever-2", tags={"x": "y"}, unit="nanosecond") - def amazing_nano(): - time.sleep(0.01) - return 23 - - assert amazing() == 42 - assert amazing_nano() == 23 - sentry_sdk.flush() - - (envelope,) = envelopes - statsd_item, meta_item = envelope.items - - assert statsd_item.headers["type"] == "statsd" - m = parse_metrics(statsd_item.payload.get_bytes()) - - assert len(m) == 2 - assert m[0][1] == "whatever-1@second" - assert m[0][2] == "d" - assert len(m[0][3]) == 1 - assert float(m[0][3][0]) >= 0.1 - assert m[0][4] == { - "x": "y", - "release": "fun-release@1.0.0", - 
"environment": "not-fun-env", - } - - assert m[1][1] == "whatever-2@nanosecond" - assert m[1][2] == "d" - assert len(m[1][3]) == 1 - assert float(m[1][3][0]) >= 10000000.0 - assert m[1][4] == { - "x": "y", - "release": "fun-release@1.0.0", - "environment": "not-fun-env", - } - - assert meta_item.headers["type"] == "metric_meta" - json = parse_json(meta_item.payload.get_bytes()) - assert json == { - "timestamp": mock.ANY, - "mapping": { - "d:whatever-1@second": [ - { - "type": "location", - "filename": "tests/test_metrics.py", - "abs_path": __file__, - "function": sys._getframe().f_code.co_name, - "module": __name__, - "lineno": mock.ANY, - "pre_context": mock.ANY, - "context_line": mock.ANY, - "post_context": mock.ANY, - } - ], - "d:whatever-2@nanosecond": [ - { - "type": "location", - "filename": "tests/test_metrics.py", - "abs_path": __file__, - "function": sys._getframe().f_code.co_name, - "module": __name__, - "lineno": mock.ANY, - "pre_context": mock.ANY, - "context_line": mock.ANY, - "post_context": mock.ANY, - } - ], - }, - } - - # XXX: this is not the best location. It would probably be better to - # report the location in the function, however that is quite a bit - # tricker to do since we report from outside the function so we really - # only see the callsite. - loc = json["mapping"]["d:whatever-1@second"][0] - line = linecache.getline(loc["abs_path"], loc["lineno"]) - assert line.strip() == "assert amazing() == 42" - - -@minimum_python_37_with_gevent -@pytest.mark.forked -def test_timing_basic(sentry_init, capture_envelopes, maybe_monkeypatched_threading): - sentry_init( - release="fun-release@1.0.0", - environment="not-fun-env", - _experiments={"enable_metrics": True, "metric_code_locations": True}, - ) - ts = time.time() - envelopes = capture_envelopes() - - metrics.timing("timing", 1.0, tags={"a": "b"}, timestamp=ts) - metrics.timing("timing", 2.0, tags={"a": "b"}, timestamp=ts) - metrics.timing("timing", 2.0, tags={"a": "b"}, timestamp=ts) - metrics.timing("timing", 3.0, tags={"a": "b"}, timestamp=ts) - sentry_sdk.flush() - - (envelope,) = envelopes - statsd_item, meta_item = envelope.items - - assert statsd_item.headers["type"] == "statsd" - m = parse_metrics(statsd_item.payload.get_bytes()) - - assert len(m) == 1 - assert m[0][1] == "timing@second" - assert m[0][2] == "d" - assert len(m[0][3]) == 4 - assert sorted(map(float, m[0][3])) == [1.0, 2.0, 2.0, 3.0] - assert m[0][4] == { - "a": "b", - "release": "fun-release@1.0.0", - "environment": "not-fun-env", - } - - assert meta_item.headers["type"] == "metric_meta" - assert parse_json(meta_item.payload.get_bytes()) == { - "timestamp": mock.ANY, - "mapping": { - "d:timing@second": [ - { - "type": "location", - "filename": "tests/test_metrics.py", - "abs_path": __file__, - "function": sys._getframe().f_code.co_name, - "module": __name__, - "lineno": mock.ANY, - "pre_context": mock.ANY, - "context_line": mock.ANY, - "post_context": mock.ANY, - } - ] - }, - } - - -@minimum_python_37_with_gevent -@pytest.mark.forked -def test_distribution(sentry_init, capture_envelopes, maybe_monkeypatched_threading): - sentry_init( - release="fun-release@1.0.0", - environment="not-fun-env", - _experiments={"enable_metrics": True, "metric_code_locations": True}, - ) - ts = time.time() - envelopes = capture_envelopes() - - metrics.distribution("dist", 1.0, tags={"a": "b"}, timestamp=ts) - metrics.distribution("dist", 2.0, tags={"a": "b"}, timestamp=ts) - metrics.distribution("dist", 2.0, tags={"a": "b"}, timestamp=ts) - 
metrics.distribution("dist", 3.0, tags={"a": "b"}, timestamp=ts) - sentry_sdk.flush() - - (envelope,) = envelopes - statsd_item, meta_item = envelope.items - - assert statsd_item.headers["type"] == "statsd" - m = parse_metrics(statsd_item.payload.get_bytes()) - - assert len(m) == 1 - assert m[0][1] == "dist@none" - assert m[0][2] == "d" - assert len(m[0][3]) == 4 - assert sorted(map(float, m[0][3])) == [1.0, 2.0, 2.0, 3.0] - assert m[0][4] == { - "a": "b", - "release": "fun-release@1.0.0", - "environment": "not-fun-env", - } - - assert meta_item.headers["type"] == "metric_meta" - json = parse_json(meta_item.payload.get_bytes()) - assert json == { - "timestamp": mock.ANY, - "mapping": { - "d:dist@none": [ - { - "type": "location", - "filename": "tests/test_metrics.py", - "abs_path": __file__, - "function": sys._getframe().f_code.co_name, - "module": __name__, - "lineno": mock.ANY, - "pre_context": mock.ANY, - "context_line": mock.ANY, - "post_context": mock.ANY, - } - ] - }, - } - - loc = json["mapping"]["d:dist@none"][0] - line = linecache.getline(loc["abs_path"], loc["lineno"]) - assert ( - line.strip() - == 'metrics.distribution("dist", 1.0, tags={"a": "b"}, timestamp=ts)' - ) - - -@minimum_python_37_with_gevent -@pytest.mark.forked -def test_set(sentry_init, capture_envelopes, maybe_monkeypatched_threading): - sentry_init( - release="fun-release@1.0.0", - environment="not-fun-env", - _experiments={"enable_metrics": True, "metric_code_locations": True}, - ) - ts = time.time() - envelopes = capture_envelopes() - - metrics.set("my-set", "peter", tags={"magic": "puff"}, timestamp=ts) - metrics.set("my-set", "paul", tags={"magic": "puff"}, timestamp=ts) - metrics.set("my-set", "mary", tags={"magic": "puff"}, timestamp=ts) - sentry_sdk.flush() - - (envelope,) = envelopes - statsd_item, meta_item = envelope.items - - assert statsd_item.headers["type"] == "statsd" - m = parse_metrics(statsd_item.payload.get_bytes()) - - assert len(m) == 1 - assert m[0][1] == "my-set@none" - assert m[0][2] == "s" - assert len(m[0][3]) == 3 - assert sorted(map(int, m[0][3])) == [354582103, 2513273657, 3329318813] - assert m[0][4] == { - "magic": "puff", - "release": "fun-release@1.0.0", - "environment": "not-fun-env", - } - - assert meta_item.headers["type"] == "metric_meta" - assert parse_json(meta_item.payload.get_bytes()) == { - "timestamp": mock.ANY, - "mapping": { - "s:my-set@none": [ - { - "type": "location", - "filename": "tests/test_metrics.py", - "abs_path": __file__, - "function": sys._getframe().f_code.co_name, - "module": __name__, - "lineno": mock.ANY, - "pre_context": mock.ANY, - "context_line": mock.ANY, - "post_context": mock.ANY, - } - ] - }, - } - - -@minimum_python_37_with_gevent -@pytest.mark.forked -def test_gauge(sentry_init, capture_envelopes, maybe_monkeypatched_threading): - sentry_init( - release="fun-release@1.0.0", - environment="not-fun-env", - _experiments={"enable_metrics": True, "metric_code_locations": False}, - ) - ts = time.time() - envelopes = capture_envelopes() - - metrics.gauge("my-gauge", 10.0, tags={"x": "y"}, timestamp=ts) - metrics.gauge("my-gauge", 20.0, tags={"x": "y"}, timestamp=ts) - metrics.gauge("my-gauge", 30.0, tags={"x": "y"}, timestamp=ts) - sentry_sdk.flush() - - (envelope,) = envelopes - - assert len(envelope.items) == 1 - assert envelope.items[0].headers["type"] == "statsd" - m = parse_metrics(envelope.items[0].payload.get_bytes()) - - assert len(m) == 1 - assert m[0][1] == "my-gauge@none" - assert m[0][2] == "g" - assert len(m[0][3]) == 5 - assert 
list(map(float, m[0][3])) == [30.0, 10.0, 30.0, 60.0, 3.0] - assert m[0][4] == { - "x": "y", - "release": "fun-release@1.0.0", - "environment": "not-fun-env", - } - - -@minimum_python_37_with_gevent -@pytest.mark.forked -def test_multiple(sentry_init, capture_envelopes): - sentry_init( - release="fun-release@1.0.0", - environment="not-fun-env", - _experiments={"enable_metrics": True, "metric_code_locations": False}, - ) - ts = time.time() - envelopes = capture_envelopes() - - metrics.gauge("my-gauge", 10.0, tags={"x": "y"}, timestamp=ts) - metrics.gauge("my-gauge", 20.0, tags={"x": "y"}, timestamp=ts) - metrics.gauge("my-gauge", 30.0, tags={"x": "y"}, timestamp=ts) - for _ in range(10): - metrics.increment("counter-1", 1.0, timestamp=ts) - metrics.increment("counter-2", 1.0, timestamp=ts) - - sentry_sdk.flush() - - (envelope,) = envelopes - - assert len(envelope.items) == 1 - assert envelope.items[0].headers["type"] == "statsd" - m = parse_metrics(envelope.items[0].payload.get_bytes()) - - assert len(m) == 3 - - assert m[0][1] == "counter-1@none" - assert m[0][2] == "c" - assert list(map(float, m[0][3])) == [10.0] - assert m[0][4] == { - "release": "fun-release@1.0.0", - "environment": "not-fun-env", - } - - assert m[1][1] == "counter-2@none" - assert m[1][2] == "c" - assert list(map(float, m[1][3])) == [1.0] - assert m[1][4] == { - "release": "fun-release@1.0.0", - "environment": "not-fun-env", - } - - assert m[2][1] == "my-gauge@none" - assert m[2][2] == "g" - assert len(m[2][3]) == 5 - assert list(map(float, m[2][3])) == [30.0, 10.0, 30.0, 60.0, 3.0] - assert m[2][4] == { - "x": "y", - "release": "fun-release@1.0.0", - "environment": "not-fun-env", - } - - -@minimum_python_37_with_gevent -@pytest.mark.forked -def test_transaction_name( - sentry_init, capture_envelopes, maybe_monkeypatched_threading -): - sentry_init( - release="fun-release@1.0.0", - environment="not-fun-env", - _experiments={"enable_metrics": True, "metric_code_locations": False}, - ) - ts = time.time() - envelopes = capture_envelopes() - - sentry_sdk.get_current_scope().set_transaction_name( - "/user/{user_id}", source=TransactionSource.ROUTE - ) - metrics.distribution("dist", 1.0, tags={"a": "b"}, timestamp=ts) - metrics.distribution("dist", 2.0, tags={"a": "b"}, timestamp=ts) - metrics.distribution("dist", 2.0, tags={"a": "b"}, timestamp=ts) - metrics.distribution("dist", 3.0, tags={"a": "b"}, timestamp=ts) - - sentry_sdk.flush() - - (envelope,) = envelopes - - assert len(envelope.items) == 1 - assert envelope.items[0].headers["type"] == "statsd" - m = parse_metrics(envelope.items[0].payload.get_bytes()) - - assert len(m) == 1 - assert m[0][1] == "dist@none" - assert m[0][2] == "d" - assert len(m[0][3]) == 4 - assert sorted(map(float, m[0][3])) == [1.0, 2.0, 2.0, 3.0] - assert m[0][4] == { - "a": "b", - "transaction": "/user/{user_id}", - "release": "fun-release@1.0.0", - "environment": "not-fun-env", - } - - -@minimum_python_37_with_gevent -@pytest.mark.forked -def test_metric_summaries( - sentry_init, capture_envelopes, maybe_monkeypatched_threading -): - sentry_init( - release="fun-release@1.0.0", - environment="not-fun-env", - enable_tracing=True, - ) - ts = time.time() - envelopes = capture_envelopes() - - with sentry_sdk.start_transaction( - op="stuff", name="/foo", source=TransactionSource.ROUTE - ) as transaction: - metrics.increment("root-counter", timestamp=ts) - with metrics.timing("my-timer-metric", tags={"a": "b"}, timestamp=ts): - for x in range(10): - metrics.distribution("my-dist", float(x), 
timestamp=ts) - - sentry_sdk.flush() - - (transaction, envelope) = envelopes - - # Metrics Emission - assert envelope.items[0].headers["type"] == "statsd" - m = parse_metrics(envelope.items[0].payload.get_bytes()) - - assert len(m) == 3 - - assert m[0][1] == "my-dist@none" - assert m[0][2] == "d" - assert len(m[0][3]) == 10 - assert sorted(m[0][3]) == list(map(str, map(float, range(10)))) - assert m[0][4] == { - "transaction": "/foo", - "release": "fun-release@1.0.0", - "environment": "not-fun-env", - } - - assert m[1][1] == "my-timer-metric@second" - assert m[1][2] == "d" - assert len(m[1][3]) == 1 - assert m[1][4] == { - "a": "b", - "transaction": "/foo", - "release": "fun-release@1.0.0", - "environment": "not-fun-env", - } - - assert m[2][1] == "root-counter@none" - assert m[2][2] == "c" - assert m[2][3] == ["1.0"] - assert m[2][4] == { - "transaction": "/foo", - "release": "fun-release@1.0.0", - "environment": "not-fun-env", - } - - # Measurement Attachment - t = transaction.items[0].get_transaction_event() - - assert t["_metrics_summary"] == { - "c:root-counter@none": [ - { - "count": 1, - "min": 1.0, - "max": 1.0, - "sum": 1.0, - "tags": { - "transaction": "/foo", - "release": "fun-release@1.0.0", - "environment": "not-fun-env", - }, - } - ] - } - - assert t["spans"][0]["_metrics_summary"]["d:my-dist@none"] == [ - { - "count": 10, - "min": 0.0, - "max": 9.0, - "sum": 45.0, - "tags": { - "environment": "not-fun-env", - "release": "fun-release@1.0.0", - "transaction": "/foo", - }, - } - ] - - assert t["spans"][0]["tags"] == {"a": "b"} - (timer,) = t["spans"][0]["_metrics_summary"]["d:my-timer-metric@second"] - assert timer["count"] == 1 - assert timer["max"] == timer["min"] == timer["sum"] - assert timer["sum"] > 0 - assert timer["tags"] == { - "a": "b", - "environment": "not-fun-env", - "release": "fun-release@1.0.0", - "transaction": "/foo", - } - - -@minimum_python_37_with_gevent -@pytest.mark.forked -@pytest.mark.parametrize( - "metric_name,metric_unit,expected_name", - [ - ("first-metric", "nano-second", "first-metric@nanosecond"), - ("another_metric?", "nano second", "another_metric_@nanosecond"), - ( - "metric", - "nanosecond", - "metric@nanosecond", - ), - ( - "my.amaze.metric I guess", - "nano|\nsecond", - "my.amaze.metric_I_guess@nanosecond", - ), - ("métríc", "nanöseconď", "m_tr_c@nansecon"), - ], -) -def test_metric_name_normalization( - sentry_init, - capture_envelopes, - metric_name, - metric_unit, - expected_name, - maybe_monkeypatched_threading, -): - sentry_init( - _experiments={"enable_metrics": True, "metric_code_locations": False}, - ) - envelopes = capture_envelopes() - - metrics.distribution(metric_name, 1.0, unit=metric_unit) - - sentry_sdk.flush() - - (envelope,) = envelopes - - assert len(envelope.items) == 1 - assert envelope.items[0].headers["type"] == "statsd" - - parsed_metrics = parse_metrics(envelope.items[0].payload.get_bytes()) - assert len(parsed_metrics) == 1 - - name = parsed_metrics[0][1] - assert name == expected_name - - -@minimum_python_37_with_gevent -@pytest.mark.forked -@pytest.mark.parametrize( - "metric_tag,expected_tag", - [ - ({"f-oo|bar": "%$foo/"}, {"f-oobar": "%$foo/"}), - ({"foo$.$.$bar": "blah{}"}, {"foo..bar": "blah{}"}), - ( - {"foö-bar": "snöwmän"}, - {"fo-bar": "snöwmän"}, - ), - ({"route": "GET /foo"}, {"route": "GET /foo"}), - ({"__bar__": "this | or , that"}, {"__bar__": "this \\u{7c} or \\u{2c} that"}), - ({"foo/": "hello!\n\r\t\\"}, {"foo/": "hello!\\n\\r\\t\\\\"}), - ], -) -def test_metric_tag_normalization( - sentry_init, - 
capture_envelopes, - metric_tag, - expected_tag, - maybe_monkeypatched_threading, -): - sentry_init( - _experiments={"enable_metrics": True, "metric_code_locations": False}, - ) - envelopes = capture_envelopes() - - metrics.distribution("a", 1.0, tags=metric_tag) - - sentry_sdk.flush() - - (envelope,) = envelopes - - assert len(envelope.items) == 1 - assert envelope.items[0].headers["type"] == "statsd" - - parsed_metrics = parse_metrics(envelope.items[0].payload.get_bytes()) - assert len(parsed_metrics) == 1 - - tags = parsed_metrics[0][4] - - expected_tag_key, expected_tag_value = expected_tag.popitem() - assert expected_tag_key in tags - assert tags[expected_tag_key] == expected_tag_value - - -@minimum_python_37_with_gevent -@pytest.mark.forked -def test_before_emit_metric( - sentry_init, capture_envelopes, maybe_monkeypatched_threading -): - def before_emit(key, value, unit, tags): - if key == "removed-metric" or value == 47 or unit == "unsupported": - return False - - tags["extra"] = "foo" - del tags["release"] - # this better be a noop! - metrics.increment("shitty-recursion") - return True - - sentry_init( - release="fun-release@1.0.0", - environment="not-fun-env", - _experiments={ - "enable_metrics": True, - "metric_code_locations": False, - "before_emit_metric": before_emit, - }, - ) - envelopes = capture_envelopes() - - metrics.increment("removed-metric", 1.0) - metrics.increment("another-removed-metric", 47) - metrics.increment("yet-another-removed-metric", 1.0, unit="unsupported") - metrics.increment("actual-metric", 1.0) - sentry_sdk.flush() - - (envelope,) = envelopes - - assert len(envelope.items) == 1 - assert envelope.items[0].headers["type"] == "statsd" - m = parse_metrics(envelope.items[0].payload.get_bytes()) - - assert len(m) == 1 - assert m[0][1] == "actual-metric@none" - assert m[0][3] == ["1.0"] - assert m[0][4] == { - "extra": "foo", - "environment": "not-fun-env", - } - - -@minimum_python_37_with_gevent -@pytest.mark.forked -def test_aggregator_flush( - sentry_init, capture_envelopes, maybe_monkeypatched_threading -): - sentry_init( - release="fun-release@1.0.0", - environment="not-fun-env", - _experiments={ - "enable_metrics": True, - }, - ) - envelopes = capture_envelopes() - - metrics.increment("a-metric", 1.0) - sentry_sdk.flush() - - assert len(envelopes) == 1 - assert sentry_sdk.get_client().metrics_aggregator.buckets == {} - - -@minimum_python_37_with_gevent -@pytest.mark.forked -def test_tag_serialization( - sentry_init, capture_envelopes, maybe_monkeypatched_threading -): - sentry_init( - release="fun-release", - environment="not-fun-env", - _experiments={"enable_metrics": True, "metric_code_locations": False}, - ) - envelopes = capture_envelopes() - - metrics.increment( - "counter", - tags={ - "no-value": None, - "an-int": 42, - "a-float": 23.0, - "a-string": "blah", - "more-than-one": [1, "zwei", "3.0", None], - }, - ) - sentry_sdk.flush() - - (envelope,) = envelopes - - assert len(envelope.items) == 1 - assert envelope.items[0].headers["type"] == "statsd" - m = parse_metrics(envelope.items[0].payload.get_bytes()) - - assert len(m) == 1 - assert m[0][4] == { - "an-int": "42", - "a-float": "23.0", - "a-string": "blah", - "more-than-one": ["1", "3.0", "zwei"], - "release": "fun-release", - "environment": "not-fun-env", - } - - -@minimum_python_37_with_gevent -@pytest.mark.forked -def test_flush_recursion_protection( - sentry_init, capture_envelopes, monkeypatch, maybe_monkeypatched_threading -): - sentry_init( - release="fun-release", - 
environment="not-fun-env", - _experiments={"enable_metrics": True}, - ) - envelopes = capture_envelopes() - test_client = sentry_sdk.get_client() - - real_capture_envelope = test_client.transport.capture_envelope - - def bad_capture_envelope(*args, **kwargs): - metrics.increment("bad-metric") - return real_capture_envelope(*args, **kwargs) - - monkeypatch.setattr(test_client.transport, "capture_envelope", bad_capture_envelope) - - metrics.increment("counter") - - # flush twice to see the inner metric - sentry_sdk.flush() - sentry_sdk.flush() - - (envelope,) = envelopes - m = parse_metrics(envelope.items[0].payload.get_bytes()) - assert len(m) == 1 - assert m[0][1] == "counter@none" - - -@minimum_python_37_with_gevent -@pytest.mark.forked -def test_flush_recursion_protection_background_flush( - sentry_init, capture_envelopes, monkeypatch, maybe_monkeypatched_threading -): - monkeypatch.setattr(metrics.MetricsAggregator, "FLUSHER_SLEEP_TIME", 0.01) - sentry_init( - release="fun-release", - environment="not-fun-env", - _experiments={"enable_metrics": True}, - ) - envelopes = capture_envelopes() - test_client = sentry_sdk.get_client() - - real_capture_envelope = test_client.transport.capture_envelope - - def bad_capture_envelope(*args, **kwargs): - metrics.increment("bad-metric") - return real_capture_envelope(*args, **kwargs) - - monkeypatch.setattr(test_client.transport, "capture_envelope", bad_capture_envelope) - - metrics.increment("counter") - - # flush via sleep and flag - sentry_sdk.get_client().metrics_aggregator._force_flush = True - time.sleep(0.5) - - (envelope,) = envelopes - m = parse_metrics(envelope.items[0].payload.get_bytes()) - assert len(m) == 1 - assert m[0][1] == "counter@none" - - -@pytest.mark.skipif( - not gevent or sys.version_info >= (3, 7), - reason="Python 3.6 or lower and gevent required", -) -@pytest.mark.forked -def test_disable_metrics_for_old_python_with_gevent( - sentry_init, capture_envelopes, maybe_monkeypatched_threading -): - if maybe_monkeypatched_threading != "greenlet": - pytest.skip("Test specifically for gevent/greenlet") - - sentry_init( - release="fun-release", - environment="not-fun-env", - _experiments={"enable_metrics": True}, - ) - envelopes = capture_envelopes() - - metrics.incr("counter") - - sentry_sdk.flush() - - assert sentry_sdk.get_client().metrics_aggregator is None - assert not envelopes diff --git a/tests/test_monitor.py b/tests/test_monitor.py index b48d9f6282..42f9c1960c 100644 --- a/tests/test_monitor.py +++ b/tests/test_monitor.py @@ -54,14 +54,16 @@ def test_monitor_unhealthy(sentry_init): assert monitor.downsample_factor == (i + 1 if i < 10 else 10) -def test_transaction_uses_downsampled_rate( - sentry_init, capture_record_lost_event_calls, monkeypatch +def test_root_span_uses_downsample_rate( + sentry_init, capture_envelopes, capture_record_lost_event_calls, monkeypatch ): sentry_init( traces_sample_rate=1.0, transport=UnhealthyTestTransport(), ) + envelopes = capture_envelopes() + record_lost_event_calls = capture_record_lost_event_calls() monitor = sentry_sdk.get_client().monitor @@ -72,16 +74,33 @@ def test_transaction_uses_downsampled_rate( assert monitor.is_healthy() is False assert monitor.downsample_factor == 1 - # make sure we don't sample the transaction + # make sure we don't sample the root span with mock.patch("sentry_sdk.tracing_utils.Random.uniform", return_value=0.75): - with sentry_sdk.start_transaction(name="foobar") as transaction: - assert transaction.sampled is False - assert transaction.sample_rate == 0.5 + 
with sentry_sdk.start_span(name="foobar") as root_span: + with sentry_sdk.start_span(name="foospan"): + with sentry_sdk.start_span(name="foospan2"): + with sentry_sdk.start_span(name="foospan3"): + ... + + assert root_span.sampled is False + assert root_span.sample_rate == 0.5 + + assert len(envelopes) == 0 assert Counter(record_lost_event_calls) == Counter( [ - ("backpressure", "transaction", None, 1), - ("backpressure", "span", None, 1), + ( + "backpressure", + "transaction", + None, + 1, + ), + ( + "backpressure", + "span", + None, + 1, + ), # Only one span (the root span itself) is counted, since the child spans were never recorded in the first place. ] ) diff --git a/tests/test_propagationcontext.py b/tests/test_propagationcontext.py index a0ce1094fa..797a18cecd 100644 --- a/tests/test_propagationcontext.py +++ b/tests/test_propagationcontext.py @@ -3,7 +3,7 @@ import pytest -from sentry_sdk.tracing_utils import PropagationContext +from sentry_sdk.tracing_utils import Baggage, PropagationContext SAMPLED_FLAG = { @@ -29,23 +29,26 @@ def test_empty_context(): def test_context_with_values(): + baggage = Baggage( + sentry_items={ + "sentry-trace": "1234567890abcdef1234567890abcdef-1234567890abcdef-1" + }, + third_party_items={"foo": "bar"}, + mutable=False, + ) ctx = PropagationContext( trace_id="1234567890abcdef1234567890abcdef", span_id="1234567890abcdef", parent_span_id="abcdef1234567890", parent_sampled=True, - dynamic_sampling_context={ - "foo": "bar", - }, + baggage=baggage, ) assert ctx.trace_id == "1234567890abcdef1234567890abcdef" assert ctx.span_id == "1234567890abcdef" assert ctx.parent_span_id == "abcdef1234567890" assert ctx.parent_sampled - assert ctx.dynamic_sampling_context == { - "foo": "bar", - } + assert ctx.baggage == baggage def test_lazy_uuids(): @@ -101,11 +104,11 @@ def test_update(): def test_existing_sample_rand_kept(): ctx = PropagationContext( trace_id="00000000000000000000000000000000", - dynamic_sampling_context={"sample_rand": "0.5"}, + baggage=Baggage(sentry_items={"sample_rand": "0.5"}), ) - # If sample_rand was regenerated, the value would be 0.919221 based on the trace_id assert ctx.dynamic_sampling_context["sample_rand"] == "0.5" + assert ctx.baggage.sentry_items["sample_rand"] == "0.5" @pytest.mark.parametrize( @@ -155,7 +158,7 @@ def mock_random_class(seed): ) assert ( - ctx.dynamic_sampling_context["sample_rand"] + ctx.dynamic_sampling_context.get("sample_rand") == f"{expected_interval[0]:.6f}" # noqa: E231 ) assert mock_uniform.call_count == 1 diff --git a/tests/test_scope.py b/tests/test_scope.py index e645d84234..4fbd04aa5c 100644 --- a/tests/test_scope.py +++ b/tests/test_scope.py @@ -11,12 +11,22 @@ ) from sentry_sdk.client import Client, NonRecordingClient from sentry_sdk.scope import ( - Scope, + Scope as BaseScope, ScopeType, - use_isolation_scope, - use_scope, should_send_default_pii, ) +from sentry_sdk.opentelemetry.scope import ( + PotelScope as Scope, + use_scope, + use_isolation_scope, + setup_scope_context_management, +) +from tests.conftest import ApproxDict + + +@pytest.fixture(autouse=True) +def setup_otel_scope_management(): + setup_scope_context_management() def test_copying(): @@ -206,7 +216,8 @@ def test_scope_client(): assert scope.client.__class__ == NonRecordingClient custom_client = Client() - scope = Scope(ty="test_more", client=custom_client) + scope = Scope(ty="test_more") + scope.set_client(custom_client) assert scope._type == "test_more" assert scope.client is not None assert scope.client.__class__ == Client @@ -230,7 +241,7 @@ def
def test_get_isolation_scope(): def test_get_global_scope(): scope = Scope.get_global_scope() assert scope is not None - assert scope.__class__ == Scope + assert scope.__class__ == BaseScope assert scope._type == ScopeType.GLOBAL @@ -797,7 +808,7 @@ def test_nested_scopes_with_tags(sentry_init, capture_envelopes): with sentry_sdk.new_scope() as scope2: scope2.set_tag("current_scope2", 1) - with sentry_sdk.start_transaction(name="trx") as trx: + with sentry_sdk.start_span(name="trx") as trx: trx.set_tag("trx", 1) with sentry_sdk.start_span(op="span1") as span1: @@ -812,9 +823,13 @@ def test_nested_scopes_with_tags(sentry_init, capture_envelopes): (envelope,) = envelopes transaction = envelope.items[0].get_transaction_event() - assert transaction["tags"] == {"isolation_scope1": 1, "current_scope2": 1, "trx": 1} - assert transaction["spans"][0]["tags"] == {"a": 1} - assert transaction["spans"][1]["tags"] == {"b": 1} + assert transaction["tags"] == { + "isolation_scope1": "1", + "current_scope2": "1", + "trx": "1", + } + assert transaction["spans"][0]["tags"] == ApproxDict({"a": "1"}) + assert transaction["spans"][1]["tags"] == ApproxDict({"b": "1"}) def test_should_send_default_pii_true(sentry_init): @@ -874,7 +889,7 @@ def test_set_tags(): def test_last_event_id(sentry_init): - sentry_init(enable_tracing=True) + sentry_init(traces_sample_rate=1.0) assert Scope.last_event_id() is None @@ -884,18 +899,18 @@ def test_last_event_id(sentry_init): def test_last_event_id_transaction(sentry_init): - sentry_init(enable_tracing=True) + sentry_init(traces_sample_rate=1.0) assert Scope.last_event_id() is None - with sentry_sdk.start_transaction(name="test"): + with sentry_sdk.start_span(name="test"): pass assert Scope.last_event_id() is None, "Transaction should not set last_event_id" def test_last_event_id_cleared(sentry_init): - sentry_init(enable_tracing=True) + sentry_init(traces_sample_rate=1.0) # Make sure last_event_id is set sentry_sdk.capture_exception(Exception("test")) @@ -907,65 +922,120 @@ def test_last_event_id_cleared(sentry_init): assert Scope.last_event_id() is None, "last_event_id should be cleared" -@pytest.mark.tests_internal_exceptions +def test_root_span(sentry_init): + sentry_init(traces_sample_rate=1.0) + + assert sentry_sdk.get_current_scope().root_span is None + + with sentry_sdk.start_span(name="test") as root_span: + assert sentry_sdk.get_current_scope().root_span == root_span + with sentry_sdk.start_span(name="child"): + assert sentry_sdk.get_current_scope().root_span == root_span + with sentry_sdk.start_span(name="grandchild"): + assert sentry_sdk.get_current_scope().root_span == root_span + + assert sentry_sdk.get_current_scope().root_span is None + + @pytest.mark.parametrize( - "scope_manager", + ("key", "value", "expected"), [ - new_scope, - use_scope, + ("int", 123, "123"), + ("float", 123.456, "123.456"), + ("bool_true", True, "True"), + ("bool_false", False, "False"), + ("none", None, "None"), + ("list", [1, 2, 3], "[1, 2, 3]"), + ("dict", {"key": "value"}, "{'key': 'value'}"), + ("already_string", "test", "test"), ], ) -def test_handle_lookup_error_on_token_reset_current_scope(scope_manager): - with mock.patch("sentry_sdk.scope.capture_internal_exception") as mock_capture: - with mock.patch("sentry_sdk.scope._current_scope") as mock_token_var: - mock_token_var.reset.side_effect = LookupError() +def test_set_tag_converts_to_string(key, value, expected): + """Test that set_tag converts various types to strings.""" + scope = Scope() + scope.set_tag(key, value) - 
mock_token = mock.Mock() - mock_token_var.set.return_value = mock_token + event = scope.apply_to_event({}, {}) + tags = event.get("tags", {}) - try: - if scope_manager == use_scope: - with scope_manager(Scope()): - pass - else: - with scope_manager(): - pass + assert tags[key] == expected, f"Tag {key} was not converted properly" - except Exception: - pytest.fail("Context manager should handle LookupError gracefully") - mock_capture.assert_called_once() - mock_token_var.reset.assert_called_once_with(mock_token) +def test_set_tags_converts_to_string(): + """Test that set_tags converts all values to strings.""" + scope = Scope() + scope.set_tags( + { + "int": 123, + "float": 123.456, + "bool": True, + "none": None, + "list": [1, 2, 3], + "string": "test", + } + ) -@pytest.mark.tests_internal_exceptions -@pytest.mark.parametrize( - "scope_manager", - [ - isolation_scope, - use_isolation_scope, - ], -) -def test_handle_lookup_error_on_token_reset_isolation_scope(scope_manager): - with mock.patch("sentry_sdk.scope.capture_internal_exception") as mock_capture: - with mock.patch("sentry_sdk.scope._current_scope") as mock_current_scope: - with mock.patch( - "sentry_sdk.scope._isolation_scope" - ) as mock_isolation_scope: - mock_isolation_scope.reset.side_effect = LookupError() - mock_current_token = mock.Mock() - mock_current_scope.set.return_value = mock_current_token - - try: - if scope_manager == use_isolation_scope: - with scope_manager(Scope()): - pass - else: - with scope_manager(): - pass - - except Exception: - pytest.fail("Context manager should handle LookupError gracefully") - - mock_capture.assert_called_once() - mock_current_scope.reset.assert_called_once_with(mock_current_token) + event = scope.apply_to_event({}, {}) + tags = event.get("tags", {}) + + assert tags["int"] == "123" + assert tags["float"] == "123.456" + assert tags["bool"] == "True" + assert tags["none"] == "None" + assert tags["list"] == "[1, 2, 3]" + assert tags["string"] == "test" + + +def test_set_tag_handles_conversion_failure(): + """Test that set_tag handles objects that fail to convert to string.""" + scope = Scope() + + # Create an object that raises an exception when str() is called + class BadObject: + def __str__(self): + raise Exception("Cannot convert to string") + + def __repr__(self): + return "BadObject()" + + bad_obj = BadObject() + + # This should not raise an exception + scope.set_tag("bad_object", bad_obj) + + # The tag should be set with the repr value + event = scope.apply_to_event({}, {}) + tags = event.get("tags", {}) + + assert tags["bad_object"] == "BadObject()", "Tag should be set with repr value" + + +def test_set_tags_handles_conversion_failure(): + """Test that set_tags handles objects that fail to convert to string.""" + scope = Scope() + + # Create an object that raises an exception when str() is called + class BadObject: + def __str__(self): + raise Exception("Cannot convert to string") + + def __repr__(self): + return "BadObject()" + + bad_obj = BadObject() + + scope.set_tags( + { + "good_tag1": "value1", + "bad_tag": bad_obj, + "good_tag2": 123, + } + ) + + event = scope.apply_to_event({}, {}) + tags = event.get("tags", {}) + + assert tags["good_tag1"] == "value1" + assert tags["bad_tag"] == "BadObject()", "Tag should be set with repr value" + assert tags["good_tag2"] == "123" diff --git a/tests/test_scrubber.py b/tests/test_scrubber.py index 2cc5f4139f..cc99411778 100644 --- a/tests/test_scrubber.py +++ b/tests/test_scrubber.py @@ -1,7 +1,8 @@ import sys import logging -from sentry_sdk 
import capture_exception, capture_event, start_transaction, start_span +from sentry_sdk import capture_exception, capture_event, start_span +from sentry_sdk.integrations.logging import LoggingIntegration from sentry_sdk.utils import event_from_exception from sentry_sdk.scrubber import EventScrubber from tests.conftest import ApproxDict @@ -119,7 +120,10 @@ def test_stack_var_scrubbing(sentry_init, capture_events): def test_breadcrumb_extra_scrubbing(sentry_init, capture_events): - sentry_init(max_breadcrumbs=2) + sentry_init( + max_breadcrumbs=2, + integrations=[LoggingIntegration(event_level="ERROR")], + ) events = capture_events() logger.info("breadcrumb 1", extra=dict(foo=1, password="secret")) logger.info("breadcrumb 2", extra=dict(bar=2, auth="secret")) @@ -153,10 +157,10 @@ def test_span_data_scrubbing(sentry_init, capture_events): sentry_init(traces_sample_rate=1.0) events = capture_events() - with start_transaction(name="hi"): + with start_span(name="hi"): with start_span(op="foo", name="bar") as span: - span.set_data("password", "secret") - span.set_data("datafoo", "databar") + span.set_attribute("password", "secret") + span.set_attribute("datafoo", "databar") (event,) = events assert event["spans"][0]["data"] == ApproxDict( diff --git a/tests/test_sessions.py b/tests/test_sessions.py index 731b188727..06c446be68 100644 --- a/tests/test_sessions.py +++ b/tests/test_sessions.py @@ -1,7 +1,7 @@ from unittest import mock import sentry_sdk -from sentry_sdk.sessions import auto_session_tracking, track_session +from sentry_sdk.sessions import track_session def sorted_aggregates(item): @@ -83,52 +83,13 @@ def test_aggregates(sentry_init, capture_envelopes): assert aggregates[0]["errored"] == 1 -def test_aggregates_deprecated( - sentry_init, capture_envelopes, suppress_deprecation_warnings -): - sentry_init( - release="fun-release", - environment="not-fun-env", - ) - envelopes = capture_envelopes() - - with auto_session_tracking(session_mode="request"): - with sentry_sdk.new_scope() as scope: - try: - scope.set_user({"id": "42"}) - raise Exception("all is wrong") - except Exception: - sentry_sdk.capture_exception() - - with auto_session_tracking(session_mode="request"): - pass - - sentry_sdk.get_isolation_scope().start_session(session_mode="request") - sentry_sdk.get_isolation_scope().end_session() - sentry_sdk.flush() - - assert len(envelopes) == 2 - assert envelopes[0].get_event() is not None - - sess = envelopes[1] - assert len(sess.items) == 1 - sess_event = sess.items[0].payload.json - assert sess_event["attrs"] == { - "release": "fun-release", - "environment": "not-fun-env", - } - - aggregates = sorted_aggregates(sess_event) - assert len(aggregates) == 1 - assert aggregates[0]["exited"] == 2 - assert aggregates[0]["errored"] == 1 - - def test_aggregates_explicitly_disabled_session_tracking_request_mode( sentry_init, capture_envelopes ): sentry_init( - release="fun-release", environment="not-fun-env", auto_session_tracking=False + release="fun-release", + environment="not-fun-env", + auto_session_tracking=False, ) envelopes = capture_envelopes() @@ -157,38 +118,6 @@ def test_aggregates_explicitly_disabled_session_tracking_request_mode( assert "errored" not in aggregates[0] -def test_aggregates_explicitly_disabled_session_tracking_request_mode_deprecated( - sentry_init, capture_envelopes, suppress_deprecation_warnings -): - sentry_init( - release="fun-release", environment="not-fun-env", auto_session_tracking=False - ) - envelopes = capture_envelopes() - - with 
auto_session_tracking(session_mode="request"): - with sentry_sdk.new_scope(): - try: - raise Exception("all is wrong") - except Exception: - sentry_sdk.capture_exception() - - with auto_session_tracking(session_mode="request"): - pass - - sentry_sdk.get_isolation_scope().start_session(session_mode="request") - sentry_sdk.get_isolation_scope().end_session() - sentry_sdk.flush() - - sess = envelopes[1] - assert len(sess.items) == 1 - sess_event = sess.items[0].payload.json - - aggregates = sorted_aggregates(sess_event) - assert len(aggregates) == 1 - assert aggregates[0]["exited"] == 1 - assert "errored" not in aggregates[0] - - def test_no_thread_on_shutdown_no_errors(sentry_init): sentry_init( release="fun-release", @@ -231,15 +160,17 @@ def test_no_thread_on_shutdown_no_errors_deprecated( "threading.Thread.start", side_effect=RuntimeError("can't create new thread at interpreter shutdown"), ): - with auto_session_tracking(session_mode="request"): - with sentry_sdk.new_scope(): - try: - raise Exception("all is wrong") - except Exception: - sentry_sdk.capture_exception() + with sentry_sdk.isolation_scope() as scope: + with track_session(scope, session_mode="request"): + with sentry_sdk.new_scope(): + try: + raise Exception("all is wrong") + except Exception: + sentry_sdk.capture_exception() - with auto_session_tracking(session_mode="request"): - pass + with sentry_sdk.isolation_scope() as scope: + with track_session(scope, session_mode="request"): + pass sentry_sdk.get_isolation_scope().start_session(session_mode="request") sentry_sdk.get_isolation_scope().end_session() diff --git a/tests/test_tracing_utils.py b/tests/test_tracing_utils.py index 2b2c62a6f9..70a7d0ed4c 100644 --- a/tests/test_tracing_utils.py +++ b/tests/test_tracing_utils.py @@ -5,8 +5,7 @@ import pytest -def id_function(val): - # type: (object) -> str +def id_function(val: object) -> str: if isinstance(val, ShouldBeIncludedTestCase): return val.id @@ -88,8 +87,9 @@ class ShouldBeIncludedTestCase: ], ids=id_function, ) -def test_should_be_included(test_case, expected): - # type: (ShouldBeIncludedTestCase, bool) -> None +def test_should_be_included( + test_case: ShouldBeIncludedTestCase, expected: bool +) -> None: """Checking logic, see: https://github.com/getsentry/sentry-python/issues/3312""" kwargs = asdict(test_case) kwargs.pop("id") diff --git a/tests/test_transport.py b/tests/test_transport.py index c6a1a0a7a7..300251fc0c 100644 --- a/tests/test_transport.py +++ b/tests/test_transport.py @@ -22,14 +22,12 @@ capture_message, isolation_scope, get_isolation_scope, - Hub, ) -from sentry_sdk._compat import PY37, PY38 -from sentry_sdk.envelope import Envelope, Item, parse_json +from sentry_sdk.envelope import Envelope, parse_json +from sentry_sdk._compat import PY38 from sentry_sdk.transport import ( KEEP_ALIVE_SOCKET_OPTIONS, _parse_rate_limits, - HttpTransport, ) from sentry_sdk.integrations.logging import LoggingIntegration, ignore_logger @@ -63,8 +61,7 @@ def inner(**kwargs): return inner -def mock_transaction_envelope(span_count): - # type: (int) -> Envelope +def mock_transaction_envelope(span_count: int) -> Envelope: event = defaultdict( mock.MagicMock, type="transaction", @@ -81,10 +78,7 @@ def mock_transaction_envelope(span_count): @pytest.mark.parametrize("client_flush_method", ["close", "flush"]) @pytest.mark.parametrize("use_pickle", (True, False)) @pytest.mark.parametrize("compression_level", (0, 9, None)) -@pytest.mark.parametrize( - "compression_algo", - (("gzip", "br", "", None) if PY37 else ("gzip", "", 
None)), -) +@pytest.mark.parametrize("compression_algo", ("gzip", "br", "", None)) @pytest.mark.parametrize("http2", [True, False] if PY38 else [False]) def test_transport_works( capturing_server, @@ -592,135 +586,6 @@ def test_complex_limits_without_data_category( assert len(capturing_server.captured) == 0 -@pytest.mark.parametrize("response_code", [200, 429]) -def test_metric_bucket_limits(capturing_server, response_code, make_client): - client = make_client() - capturing_server.respond_with( - code=response_code, - headers={ - "X-Sentry-Rate-Limits": "4711:metric_bucket:organization:quota_exceeded:custom" - }, - ) - - envelope = Envelope() - envelope.add_item(Item(payload=b"{}", type="statsd")) - client.transport.capture_envelope(envelope) - client.flush() - - assert len(capturing_server.captured) == 1 - assert capturing_server.captured[0].path == "/api/132/envelope/" - capturing_server.clear_captured() - - assert set(client.transport._disabled_until) == set(["metric_bucket"]) - - client.transport.capture_envelope(envelope) - client.capture_event({"type": "transaction"}) - client.flush() - - assert len(capturing_server.captured) == 2 - - envelope = capturing_server.captured[0].envelope - assert envelope.items[0].type == "transaction" - envelope = capturing_server.captured[1].envelope - assert envelope.items[0].type == "client_report" - report = parse_json(envelope.items[0].get_bytes()) - assert report["discarded_events"] == [ - {"category": "metric_bucket", "reason": "ratelimit_backoff", "quantity": 1}, - ] - - -@pytest.mark.parametrize("response_code", [200, 429]) -def test_metric_bucket_limits_with_namespace( - capturing_server, response_code, make_client -): - client = make_client() - capturing_server.respond_with( - code=response_code, - headers={ - "X-Sentry-Rate-Limits": "4711:metric_bucket:organization:quota_exceeded:foo" - }, - ) - - envelope = Envelope() - envelope.add_item(Item(payload=b"{}", type="statsd")) - client.transport.capture_envelope(envelope) - client.flush() - - assert len(capturing_server.captured) == 1 - assert capturing_server.captured[0].path == "/api/132/envelope/" - capturing_server.clear_captured() - - assert set(client.transport._disabled_until) == set([]) - - client.transport.capture_envelope(envelope) - client.capture_event({"type": "transaction"}) - client.flush() - - assert len(capturing_server.captured) == 2 - - envelope = capturing_server.captured[0].envelope - assert envelope.items[0].type == "statsd" - envelope = capturing_server.captured[1].envelope - assert envelope.items[0].type == "transaction" - - -@pytest.mark.parametrize("response_code", [200, 429]) -def test_metric_bucket_limits_with_all_namespaces( - capturing_server, response_code, make_client -): - client = make_client() - capturing_server.respond_with( - code=response_code, - headers={ - "X-Sentry-Rate-Limits": "4711:metric_bucket:organization:quota_exceeded" - }, - ) - - envelope = Envelope() - envelope.add_item(Item(payload=b"{}", type="statsd")) - client.transport.capture_envelope(envelope) - client.flush() - - assert len(capturing_server.captured) == 1 - assert capturing_server.captured[0].path == "/api/132/envelope/" - capturing_server.clear_captured() - - assert set(client.transport._disabled_until) == set(["metric_bucket"]) - - client.transport.capture_envelope(envelope) - client.capture_event({"type": "transaction"}) - client.flush() - - assert len(capturing_server.captured) == 2 - - envelope = capturing_server.captured[0].envelope - assert envelope.items[0].type == "transaction" 
- envelope = capturing_server.captured[1].envelope - assert envelope.items[0].type == "client_report" - report = parse_json(envelope.items[0].get_bytes()) - assert report["discarded_events"] == [ - {"category": "metric_bucket", "reason": "ratelimit_backoff", "quantity": 1}, - ] - - -def test_hub_cls_backwards_compat(): - class TestCustomHubClass(Hub): - pass - - transport = HttpTransport( - defaultdict(lambda: None, {"dsn": "https://123abc@example.com/123"}) - ) - - with pytest.deprecated_call(): - assert transport.hub_cls is Hub - - with pytest.deprecated_call(): - transport.hub_cls = TestCustomHubClass - - with pytest.deprecated_call(): - assert transport.hub_cls is TestCustomHubClass - - @pytest.mark.parametrize("quantity", (1, 2, 10)) def test_record_lost_event_quantity(capturing_server, make_client, quantity): client = make_client() diff --git a/tests/test_utils.py b/tests/test_utils.py index b268fbd57b..fb63ccb517 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -31,6 +31,7 @@ is_sentry_url, _get_installed_modules, ensure_integration_enabled, + _serialize_span_attribute, to_string, exc_info_from_error, get_lines_from_file, @@ -53,8 +54,7 @@ class TestIntegration(Integration): gevent = None -def _normalize_distribution_name(name): - # type: (str) -> str +def _normalize_distribution_name(name: str) -> str: """Normalize distribution name according to PEP-0503. See: @@ -116,20 +116,6 @@ def test_datetime_from_isoformat(input_str, expected_output): assert datetime_from_isoformat(input_str) == expected_output, input_str -@pytest.mark.parametrize( - ("input_str", "expected_output"), - isoformat_inputs_and_datetime_outputs, -) -def test_datetime_from_isoformat_with_py_36_or_lower(input_str, expected_output): - """ - `fromisoformat` was added in Python version 3.7 - """ - with mock.patch("sentry_sdk.utils.datetime") as datetime_mocked: - datetime_mocked.fromisoformat.side_effect = AttributeError() - datetime_mocked.strptime = datetime.strptime - assert datetime_from_isoformat(input_str) == expected_output, input_str - - @pytest.mark.parametrize( "env_var_value,strict,expected", [ @@ -505,7 +491,7 @@ def test_accepts_valid_sample_rate(rate): with mock.patch.object(logger, "warning", mock.Mock()): result = is_valid_sample_rate(rate, source="Testing") assert logger.warning.called is False - assert result is True + assert result == float(rate) @pytest.mark.parametrize( @@ -526,7 +512,7 @@ def test_warns_on_invalid_sample_rate(rate, StringContaining): # noqa: N803 with mock.patch.object(logger, "warning", mock.Mock()): result = is_valid_sample_rate(rate, source="Testing") logger.warning.assert_any_call(StringContaining("Given sample rate is invalid")) - assert result is False + assert result is None @pytest.mark.parametrize( @@ -965,6 +951,37 @@ def test_format_timestamp_naive(): assert re.fullmatch(timestamp_regex, format_timestamp(datetime_object)) +class NoStr: + def __str__(self): + 1 / 0 + + +@pytest.mark.parametrize( + ("value", "result"), + ( + ("meow", "meow"), + (1, 1), + (47.0, 47.0), + (True, True), + (["meow", "bark"], ["meow", "bark"]), + ([True, False], [True, False]), + ([1, 2, 3], [1, 2, 3]), + ([46.5, 47.0, 47.5], [46.5, 47.0, 47.5]), + (["meow", 47], '["meow", 47]'), # mixed types not allowed in a list + (None, "null"), + ( + {"cat": "meow", "dog": ["bark", "woof"]}, + '{"cat": "meow", "dog": ["bark", "woof"]}', + ), + (datetime(2024, 1, 1), "2024-01-01 00:00:00"), + (("meow", "purr"), ["meow", "purr"]), + (NoStr(), None), + ), +) +def 
test_serialize_span_attribute(value, result): + assert _serialize_span_attribute(value) == result + + def test_qualname_from_function_inner_function(): def test_function(): ... diff --git a/tests/tracing/test_decorator.py b/tests/tracing/test_decorator.py index 18a66bd43e..c8a384f42f 100644 --- a/tests/tracing/test_decorator.py +++ b/tests/tracing/test_decorator.py @@ -3,10 +3,9 @@ import pytest +import sentry_sdk from sentry_sdk.tracing import trace -from sentry_sdk.tracing_utils import start_child_span_decorator from sentry_sdk.utils import logger -from tests.conftest import patch_start_tracing_child def my_example_function(): @@ -17,67 +16,80 @@ async def my_async_example_function(): return "return_of_async_function" -@pytest.mark.forked -def test_trace_decorator(): - with patch_start_tracing_child() as fake_start_child: +def test_trace_decorator(sentry_init, capture_events): + sentry_init(traces_sample_rate=1.0) + events = capture_events() + + with sentry_sdk.start_span(name="test"): result = my_example_function() - fake_start_child.assert_not_called() assert result == "return_of_sync_function" - result2 = start_child_span_decorator(my_example_function)() - fake_start_child.assert_called_once_with( - op="function", name="test_decorator.my_example_function" - ) + result2 = trace(my_example_function)() assert result2 == "return_of_sync_function" + (event,) = events + (span,) = event["spans"] + assert span["op"] == "function" + assert span["description"] == "test_decorator.my_example_function" + + +def test_trace_decorator_no_trx(sentry_init, capture_events): + sentry_init(traces_sample_rate=1.0) + events = capture_events() + + with mock.patch.object(logger, "debug", mock.Mock()) as fake_debug: + result = my_example_function() + assert result == "return_of_sync_function" + fake_debug.assert_not_called() -def test_trace_decorator_no_trx(): - with patch_start_tracing_child(fake_transaction_is_none=True): - with mock.patch.object(logger, "debug", mock.Mock()) as fake_debug: - result = my_example_function() - fake_debug.assert_not_called() - assert result == "return_of_sync_function" + result2 = trace(my_example_function)() + assert result2 == "return_of_sync_function" + fake_debug.assert_called_once_with( + "Cannot create a child span for %s. " + "Please start a Sentry transaction before calling this function.", + "test_decorator.my_example_function", + ) - result2 = start_child_span_decorator(my_example_function)() - fake_debug.assert_called_once_with( - "Cannot create a child span for %s. 
" - "Please start a Sentry transaction before calling this function.", - "test_decorator.my_example_function", - ) - assert result2 == "return_of_sync_function" + assert len(events) == 0 -@pytest.mark.forked @pytest.mark.asyncio -async def test_trace_decorator_async(): - with patch_start_tracing_child() as fake_start_child: +async def test_trace_decorator_async(sentry_init, capture_events): + sentry_init(traces_sample_rate=1.0) + events = capture_events() + + with sentry_sdk.start_span(name="test"): result = await my_async_example_function() - fake_start_child.assert_not_called() assert result == "return_of_async_function" - result2 = await start_child_span_decorator(my_async_example_function)() - fake_start_child.assert_called_once_with( - op="function", - name="test_decorator.my_async_example_function", - ) + result2 = await trace(my_async_example_function)() assert result2 == "return_of_async_function" + (event,) = events + (span,) = event["spans"] + assert span["op"] == "function" + assert span["description"] == "test_decorator.my_async_example_function" + @pytest.mark.asyncio -async def test_trace_decorator_async_no_trx(): - with patch_start_tracing_child(fake_transaction_is_none=True): - with mock.patch.object(logger, "debug", mock.Mock()) as fake_debug: - result = await my_async_example_function() - fake_debug.assert_not_called() - assert result == "return_of_async_function" - - result2 = await start_child_span_decorator(my_async_example_function)() - fake_debug.assert_called_once_with( - "Cannot create a child span for %s. " - "Please start a Sentry transaction before calling this function.", - "test_decorator.my_async_example_function", - ) - assert result2 == "return_of_async_function" +async def test_trace_decorator_async_no_trx(sentry_init, capture_events): + sentry_init(traces_sample_rate=1.0) + events = capture_events() + + with mock.patch.object(logger, "debug", mock.Mock()) as fake_debug: + result = await my_async_example_function() + fake_debug.assert_not_called() + assert result == "return_of_async_function" + + result2 = await trace(my_async_example_function)() + fake_debug.assert_called_once_with( + "Cannot create a child span for %s. " + "Please start a Sentry transaction before calling this function.", + "test_decorator.my_async_example_function", + ) + assert result2 == "return_of_async_function" + + assert len(events) == 0 def test_functions_to_trace_signature_unchanged_sync(sentry_init): diff --git a/tests/tracing/test_deprecated.py b/tests/tracing/test_deprecated.py deleted file mode 100644 index fb58e43ebf..0000000000 --- a/tests/tracing/test_deprecated.py +++ /dev/null @@ -1,59 +0,0 @@ -import warnings - -import pytest - -import sentry_sdk -import sentry_sdk.tracing -from sentry_sdk import start_span - -from sentry_sdk.tracing import Span - - -@pytest.mark.skip(reason="This deprecated feature has been removed in SDK 2.0.") -def test_start_span_to_start_transaction(sentry_init, capture_events): - # XXX: this only exists for backwards compatibility with code before - # Transaction / start_transaction were introduced. 
- sentry_init(traces_sample_rate=1.0) - events = capture_events() - - with start_span(transaction="/1/"): - pass - - with start_span(Span(transaction="/2/")): - pass - - assert len(events) == 2 - assert events[0]["transaction"] == "/1/" - assert events[1]["transaction"] == "/2/" - - -@pytest.mark.parametrize( - "parameter_value_getter", - # Use lambda to avoid Hub deprecation warning here (will suppress it in the test) - (lambda: sentry_sdk.Hub(), lambda: sentry_sdk.Scope()), -) -def test_passing_hub_parameter_to_transaction_finish( - suppress_deprecation_warnings, parameter_value_getter -): - parameter_value = parameter_value_getter() - transaction = sentry_sdk.tracing.Transaction() - with pytest.warns(DeprecationWarning): - transaction.finish(hub=parameter_value) - - -def test_passing_hub_object_to_scope_transaction_finish(suppress_deprecation_warnings): - transaction = sentry_sdk.tracing.Transaction() - - # Do not move the following line under the `with` statement. Otherwise, the Hub.__init__ deprecation - # warning will be confused with the transaction.finish deprecation warning that we are testing. - hub = sentry_sdk.Hub() - - with pytest.warns(DeprecationWarning): - transaction.finish(hub) - - -def test_no_warnings_scope_to_transaction_finish(): - transaction = sentry_sdk.tracing.Transaction() - with warnings.catch_warnings(): - warnings.simplefilter("error") - transaction.finish(sentry_sdk.Scope()) diff --git a/tests/tracing/test_http_headers.py b/tests/tracing/test_http_headers.py deleted file mode 100644 index 6a8467101e..0000000000 --- a/tests/tracing/test_http_headers.py +++ /dev/null @@ -1,56 +0,0 @@ -from unittest import mock - -import pytest - -from sentry_sdk.tracing import Transaction -from sentry_sdk.tracing_utils import extract_sentrytrace_data - - -@pytest.mark.parametrize("sampled", [True, False, None]) -def test_to_traceparent(sampled): - transaction = Transaction( - name="/interactions/other-dogs/new-dog", - op="greeting.sniff", - trace_id="12312012123120121231201212312012", - sampled=sampled, - ) - - traceparent = transaction.to_traceparent() - - parts = traceparent.split("-") - assert parts[0] == "12312012123120121231201212312012" # trace_id - assert parts[1] == transaction.span_id # parent_span_id - if sampled is None: - assert len(parts) == 2 - else: - assert parts[2] == "1" if sampled is True else "0" # sampled - - -@pytest.mark.parametrize("sampling_decision", [True, False]) -def test_sentrytrace_extraction(sampling_decision): - sentrytrace_header = "12312012123120121231201212312012-0415201309082013-{}".format( - 1 if sampling_decision is True else 0 - ) - assert extract_sentrytrace_data(sentrytrace_header) == { - "trace_id": "12312012123120121231201212312012", - "parent_span_id": "0415201309082013", - "parent_sampled": sampling_decision, - } - - -def test_iter_headers(monkeypatch): - monkeypatch.setattr( - Transaction, - "to_traceparent", - mock.Mock(return_value="12312012123120121231201212312012-0415201309082013-0"), - ) - - transaction = Transaction( - name="/interactions/other-dogs/new-dog", - op="greeting.sniff", - ) - - headers = dict(transaction.iter_headers()) - assert ( - headers["sentry-trace"] == "12312012123120121231201212312012-0415201309082013-0" - ) diff --git a/tests/tracing/test_integration_tests.py b/tests/tracing/test_integration_tests.py index 61ef14b7d0..2b738fcf59 100644 --- a/tests/tracing/test_integration_tests.py +++ b/tests/tracing/test_integration_tests.py @@ -1,7 +1,6 @@ -import gc import re import sys -import weakref +import asyncio 
from unittest import mock import pytest @@ -9,12 +8,12 @@ import sentry_sdk from sentry_sdk import ( capture_message, + continue_trace, start_span, - start_transaction, ) from sentry_sdk.consts import SPANSTATUS from sentry_sdk.transport import Transport -from sentry_sdk.tracing import Transaction +from tests.conftest import SortedBaggage @pytest.mark.parametrize("sample_rate", [0.0, 1.0]) @@ -22,8 +21,8 @@ def test_basic(sentry_init, capture_events, sample_rate): sentry_init(traces_sample_rate=sample_rate) events = capture_events() - with start_transaction(name="hi") as transaction: - transaction.set_status(SPANSTATUS.OK) + with start_span(name="hi") as root_span: + root_span.set_status(SPANSTATUS.OK) with pytest.raises(ZeroDivisionError): with start_span(op="foo", name="foodesc"): 1 / 0 @@ -40,24 +39,21 @@ def test_basic(sentry_init, capture_events, sample_rate): span1, span2 = event["spans"] parent_span = event - assert span1["tags"]["status"] == "internal_error" + assert span1["status"] == "internal_error" assert span1["op"] == "foo" assert span1["description"] == "foodesc" - assert "status" not in span2.get("tags", {}) + assert span2["status"] == "ok" assert span2["op"] == "bar" assert span2["description"] == "bardesc" assert parent_span["transaction"] == "hi" - assert "status" not in event["tags"] + assert "status" not in event.get("tags", {}) assert event["contexts"]["trace"]["status"] == "ok" else: assert not events -@pytest.mark.parametrize("parent_sampled", [True, False, None]) @pytest.mark.parametrize("sample_rate", [0.0, 1.0]) -def test_continue_from_headers( - sentry_init, capture_envelopes, parent_sampled, sample_rate -): +def test_continue_trace(sentry_init, capture_envelopes, sample_rate): """ Ensure data is actually passed along via headers, and that they are read correctly.
@@ -66,55 +62,41 @@ def test_continue_from_headers( envelopes = capture_envelopes() # make a parent transaction (normally this would be in a different service) - with start_transaction(name="hi", sampled=True if sample_rate == 0 else None): - with start_span() as old_span: - old_span.sampled = parent_sampled - headers = dict( - sentry_sdk.get_current_scope().iter_trace_propagation_headers(old_span) - ) - headers["baggage"] = ( - "other-vendor-value-1=foo;bar;baz, " - "sentry-trace_id=771a43a4192642f0b136d5159a501700, " - "sentry-public_key=49d0f7386ad645858ae85020e393bef3, " - "sentry-sample_rate=0.01337, sentry-user_id=Amelie, " - "other-vendor-value-2=foo;bar;" - ) + with start_span(name="hi"): + with start_span(name="inner") as old_span: + headers = dict(old_span.iter_headers()) + assert headers["sentry-trace"] + assert headers["baggage"] # child transaction, to prove that we can read 'sentry-trace' header data correctly - child_transaction = Transaction.continue_from_headers(headers, name="WRONG") - assert child_transaction is not None - assert child_transaction.parent_sampled == parent_sampled - assert child_transaction.trace_id == old_span.trace_id - assert child_transaction.same_process_as_parent is False - assert child_transaction.parent_span_id == old_span.span_id - assert child_transaction.span_id != old_span.span_id - - baggage = child_transaction._baggage - assert baggage - assert not baggage.mutable - assert baggage.sentry_items == { - "public_key": "49d0f7386ad645858ae85020e393bef3", - "trace_id": "771a43a4192642f0b136d5159a501700", - "user_id": "Amelie", - "sample_rate": "0.01337", - } - - # add child transaction to the scope, to show that the captured message will - # be tagged with the trace id (since it happens while the transaction is - # open) - with start_transaction(child_transaction): - # change the transaction name from "WRONG" to make sure the change - # is reflected in the final data - sentry_sdk.get_current_scope().transaction = "ho" - capture_message("hello") - - if parent_sampled is False or (sample_rate == 0 and parent_sampled is None): - # in this case the child transaction won't be captured - trace1, message = envelopes + with continue_trace(headers): + with start_span(name="WRONG") as child_root_span: + assert child_root_span is not None + assert child_root_span.sampled == (sample_rate == 1.0) + if child_root_span.sampled: + assert child_root_span.parent_span_id == old_span.span_id + assert child_root_span.trace_id == old_span.trace_id + assert child_root_span.span_id != old_span.span_id + + baggage = child_root_span.get_baggage() + assert baggage.serialize() == SortedBaggage(headers["baggage"]) + + # change the transaction name from "WRONG" to make sure the change + # is reflected in the final data + sentry_sdk.get_current_scope().set_transaction_name("ho") + # to show that the captured message will be tagged with the trace id + # (since it happens while the transaction is open) + capture_message("hello") + + # in this case the child transaction won't be captured + # but the message still follows the TwP (tracing without performance) spec + if sample_rate == 0.0: + (message,) = envelopes message_payload = message.get_event() - trace1_payload = trace1.get_transaction_event() - - assert trace1_payload["transaction"] == "hi" + assert message_payload["transaction"] == "ho" + assert ( + child_root_span.trace_id == message_payload["contexts"]["trace"]["trace_id"] + ) else: trace1, message, trace2 = envelopes trace1_payload = trace1.get_transaction_event() @@ -127,74 +109,42 @@ def
test_continue_from_headers( assert ( trace1_payload["contexts"]["trace"]["trace_id"] == trace2_payload["contexts"]["trace"]["trace_id"] - == child_transaction.trace_id + == child_root_span.trace_id == message_payload["contexts"]["trace"]["trace_id"] ) - if parent_sampled is not None: - expected_sample_rate = str(float(parent_sampled)) - else: - expected_sample_rate = str(sample_rate) - assert trace2.headers["trace"] == baggage.dynamic_sampling_context() - assert trace2.headers["trace"] == { - "public_key": "49d0f7386ad645858ae85020e393bef3", - "trace_id": "771a43a4192642f0b136d5159a501700", - "user_id": "Amelie", - "sample_rate": expected_sample_rate, - } assert message_payload["message"] == "hello" -@pytest.mark.parametrize("sample_rate", [0.0, 1.0]) -def test_propagate_traces_deprecation_warning(sentry_init, sample_rate): - sentry_init(traces_sample_rate=sample_rate, propagate_traces=False) - - with start_transaction(name="hi"): - with start_span() as old_span: - with pytest.warns(DeprecationWarning): - dict( - sentry_sdk.get_current_scope().iter_trace_propagation_headers( - old_span - ) - ) - - @pytest.mark.parametrize("sample_rate", [0.5, 1.0]) def test_dynamic_sampling_head_sdk_creates_dsc( - sentry_init, capture_envelopes, sample_rate, monkeypatch + sentry_init, + capture_envelopes, + sample_rate, + monkeypatch, ): sentry_init(traces_sample_rate=sample_rate, release="foo") envelopes = capture_envelopes() # make sure transaction is sampled for both cases with mock.patch("sentry_sdk.tracing_utils.Random.uniform", return_value=0.25): - transaction = Transaction.continue_from_headers({}, name="Head SDK tx") - # will create empty mutable baggage - baggage = transaction._baggage - assert baggage - assert baggage.mutable - assert baggage.sentry_items == {} - assert baggage.third_party_items == "" - - with start_transaction(transaction): - with start_span(op="foo", name="foodesc"): - pass + with continue_trace({}): + with start_span(name="Head SDK tx"): + with start_span(op="foo", name="foodesc") as span: + baggage = span.get_baggage() - # finish will create a new baggage entry - baggage = transaction._baggage - trace_id = transaction.trace_id + trace_id = span.trace_id assert baggage - assert not baggage.mutable assert baggage.third_party_items == "" assert baggage.sentry_items == { "environment": "production", "release": "foo", "sample_rate": str(sample_rate), - "sampled": "true" if transaction.sampled else "false", + "sampled": "true" if span.sampled else "false", "sample_rand": "0.250000", "transaction": "Head SDK tx", "trace_id": trace_id, @@ -208,9 +158,9 @@ def test_dynamic_sampling_head_sdk_creates_dsc( "sentry-transaction=Head%%20SDK%%20tx," "sentry-sample_rate=%s," "sentry-sampled=%s" - % (trace_id, sample_rate, "true" if transaction.sampled else "false") + % (trace_id, sample_rate, "true" if span.sampled else "false") ) - assert baggage.serialize() == expected_baggage + assert baggage.serialize() == SortedBaggage(expected_baggage) (envelope,) = envelopes assert envelope.headers["trace"] == baggage.dynamic_sampling_context() @@ -218,42 +168,13 @@ def test_dynamic_sampling_head_sdk_creates_dsc( "environment": "production", "release": "foo", "sample_rate": str(sample_rate), + "sampled": "true" if span.sampled else "false", "sample_rand": "0.250000", - "sampled": "true" if transaction.sampled else "false", "transaction": "Head SDK tx", "trace_id": trace_id, } -@pytest.mark.parametrize( - "args,expected_refcount", - [({"traces_sample_rate": 1.0}, 100), ({"traces_sample_rate": 0.0}, 
0)], -) -def test_memory_usage(sentry_init, capture_events, args, expected_refcount): - sentry_init(**args) - - references = weakref.WeakSet() - - with start_transaction(name="hi"): - for i in range(100): - with start_span(op="helloworld", name="hi {}".format(i)) as span: - - def foo(): - pass - - references.add(foo) - span.set_tag("foo", foo) - pass - - del foo - del span - - # required only for pypy (cpython frees immediately) - gc.collect() - - assert len(references) == expected_refcount - - def test_transactions_do_not_go_through_before_send(sentry_init, capture_events): def before_send(event, hint): raise RuntimeError("should not be called") @@ -261,7 +182,7 @@ def before_send(event, hint): sentry_init(traces_sample_rate=1.0, before_send=before_send) events = capture_events() - with start_transaction(name="/"): + with start_span(name="/"): pass assert len(events) == 1 @@ -279,7 +200,7 @@ def capture_event(self, event): sentry_init(traces_sample_rate=1, transport=CustomTransport()) events = capture_events() - with start_transaction(name="hi"): + with start_span(name="hi"): with start_span(op="bar", name="bardesc"): pass @@ -289,14 +210,14 @@ def capture_event(self, event): def test_trace_propagation_meta_head_sdk(sentry_init): sentry_init(traces_sample_rate=1.0, release="foo") - transaction = Transaction.continue_from_headers({}, name="Head SDK tx") meta = None span = None - with start_transaction(transaction): - with start_span(op="foo", name="foodesc") as current_span: - span = current_span - meta = sentry_sdk.get_current_scope().trace_propagation_meta() + with continue_trace({}): + with start_span(name="Head SDK tx") as root_span: + with start_span(op="foo", name="foodesc") as current_span: + span = current_span + meta = sentry_sdk.get_current_scope().trace_propagation_meta() ind = meta.find(">") + 1 sentry_trace, baggage = meta[:ind], meta[ind:] @@ -307,7 +228,7 @@ def test_trace_propagation_meta_head_sdk(sentry_init): assert 'meta name="baggage"' in baggage baggage_content = re.findall('content="([^"]*)"', baggage)[0] - assert baggage_content == transaction.get_baggage().serialize() + assert SortedBaggage(baggage_content) == root_span.get_baggage().serialize() @pytest.mark.parametrize( @@ -322,8 +243,8 @@ def test_non_error_exceptions( sentry_init(traces_sample_rate=1.0) events = capture_events() - with start_transaction(name="hi") as transaction: - transaction.set_status(SPANSTATUS.OK) + with start_span(name="hi") as root_span: + root_span.set_status(SPANSTATUS.OK) with pytest.raises(exception_cls): with start_span(op="foo", name="foodesc"): raise exception_cls(exception_value) @@ -333,7 +254,7 @@ def test_non_error_exceptions( span = event["spans"][0] assert "status" not in span.get("tags", {}) - assert "status" not in event["tags"] + assert "status" not in event.get("tags", {}) assert event["contexts"]["trace"]["status"] == "ok" @@ -344,8 +265,8 @@ def test_good_sysexit_doesnt_fail_transaction( sentry_init(traces_sample_rate=1.0) events = capture_events() - with start_transaction(name="hi") as transaction: - transaction.set_status(SPANSTATUS.OK) + with start_span(name="hi") as span: + span.set_status(SPANSTATUS.OK) with pytest.raises(SystemExit): with start_span(op="foo", name="foodesc"): if exception_value is not False: @@ -358,5 +279,28 @@ def test_good_sysexit_doesnt_fail_transaction( span = event["spans"][0] assert "status" not in span.get("tags", {}) - assert "status" not in event["tags"] + assert "status" not in event.get("tags", {}) assert 
event["contexts"]["trace"]["status"] == "ok" + + +@pytest.mark.asyncio +async def test_async_context_manager(sentry_init, capture_events): + """Test that spans work as async context managers""" + sentry_init(traces_sample_rate=1.0) + events = capture_events() + + async with start_span(name="async_transaction") as transaction: + transaction.set_status(SPANSTATUS.OK) + async with start_span(op="async.task", name="async_operation") as span: + span.set_tag("test", "async") + await asyncio.sleep(0.001) + + assert len(events) == 1 + event = events[0] + assert event["transaction"] == "async_transaction" + assert event["contexts"]["trace"]["status"] == "ok" + assert len(event["spans"]) == 1 + span = event["spans"][0] + assert span["op"] == "async.task" + assert span["description"] == "async_operation" + assert span["tags"]["test"] == "async" diff --git a/tests/tracing/test_misc.py b/tests/tracing/test_misc.py index e1de847102..9ef01aa80d 100644 --- a/tests/tracing/test_misc.py +++ b/tests/tracing/test_misc.py @@ -1,14 +1,9 @@ import pytest -import gc -import uuid -import os -from unittest import mock -from unittest.mock import MagicMock +from unittest.mock import MagicMock, ANY import sentry_sdk -from sentry_sdk import start_span, start_transaction, set_measurement +from sentry_sdk import start_span, get_current_scope from sentry_sdk.consts import MATCH_ALL -from sentry_sdk.tracing import Span, Transaction from sentry_sdk.tracing_utils import should_propagate_trace from sentry_sdk.utils import Dsn from tests.conftest import ApproxDict @@ -18,9 +13,9 @@ def test_span_trimming(sentry_init, capture_events): sentry_init(traces_sample_rate=1.0, _experiments={"max_spans": 3}) events = capture_events() - with start_transaction(name="hi"): + with start_span(name="hi"): for i in range(10): - with start_span(op="foo{}".format(i)): + with start_span(op=f"foo{i}"): pass (event,) = events @@ -34,20 +29,19 @@ def test_span_trimming(sentry_init, capture_events): assert event["_meta"]["spans"][""]["len"] == 10 assert "_dropped_spans" not in event - assert "dropped_spans" not in event def test_span_data_scrubbing_and_trimming(sentry_init, capture_events): sentry_init(traces_sample_rate=1.0, _experiments={"max_spans": 3}) events = capture_events() - with start_transaction(name="hi"): + with start_span(name="hi"): with start_span(op="foo", name="bar") as span: - span.set_data("password", "secret") - span.set_data("datafoo", "databar") + span.set_attribute("password", "secret") + span.set_attribute("datafoo", "databar") for i in range(10): - with start_span(op="foo{}".format(i)): + with start_span(op=f"foo{i}"): pass (event,) = events @@ -65,33 +59,33 @@ def test_transaction_naming(sentry_init, capture_events): events = capture_events() # default name in event if no name is passed - with start_transaction() as transaction: + with start_span(): pass assert len(events) == 1 - assert events[0]["transaction"] == "" + assert events[0]["transaction"] == "" # the name can be set once the transaction's already started - with start_transaction() as transaction: - transaction.name = "name-known-after-transaction-started" + with start_span() as span: + span.name = "name-known-after-transaction-started" assert len(events) == 2 assert events[1]["transaction"] == "name-known-after-transaction-started" # passing in a name works, too - with start_transaction(name="a"): + with start_span(name="a"): pass assert len(events) == 3 assert events[2]["transaction"] == "a" -def test_transaction_data(sentry_init, capture_events): +def 
test_root_span_data(sentry_init, capture_events): sentry_init(traces_sample_rate=1.0) events = capture_events() - with start_transaction(name="test-transaction"): - span_or_tx = sentry_sdk.get_current_span() - span_or_tx.set_data("foo", "bar") + with start_span(name="test-root-span"): + root_span = sentry_sdk.get_current_span() + root_span.set_attribute("foo", "bar") with start_span(op="test-span") as span: - span.set_data("spanfoo", "spanbar") + span.set_attribute("spanfoo", "spanbar") assert len(events) == 1 @@ -110,259 +104,15 @@ def test_transaction_data(sentry_init, capture_events): assert span_data.items() >= {"spanfoo": "spanbar"}.items() -def test_start_transaction(sentry_init): +def test_finds_spans_on_scope(sentry_init): sentry_init(traces_sample_rate=1.0) - # you can have it start a transaction for you - result1 = start_transaction( - name="/interactions/other-dogs/new-dog", op="greeting.sniff" - ) - assert isinstance(result1, Transaction) - assert result1.name == "/interactions/other-dogs/new-dog" - assert result1.op == "greeting.sniff" - - # or you can pass it an already-created transaction - preexisting_transaction = Transaction( - name="/interactions/other-dogs/new-dog", op="greeting.sniff" - ) - result2 = start_transaction(preexisting_transaction) - assert result2 is preexisting_transaction - - -def test_finds_transaction_on_scope(sentry_init): - sentry_init(traces_sample_rate=1.0) - - transaction = start_transaction(name="dogpark") - - scope = sentry_sdk.get_current_scope() - - # See note in Scope class re: getters and setters of the `transaction` - # property. For the moment, assigning to scope.transaction merely sets the - # transaction name, rather than putting the transaction on the scope, so we - # have to assign to _span directly. - scope._span = transaction - - # Reading scope.property, however, does what you'd expect, and returns the - # transaction on the scope. - assert scope.transaction is not None - assert isinstance(scope.transaction, Transaction) - assert scope.transaction.name == "dogpark" - - # If the transaction is also set as the span on the scope, it can be found - # by accessing _span, too. 
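The deleted assertions here reach into the private scope._span attribute; their replacement, test_finds_spans_on_scope (renamed above, with its body added over the following hunks), checks the same behavior through the public API alone. Reduced to a sketch:

import sentry_sdk
from sentry_sdk import start_span, get_current_scope

sentry_sdk.init(traces_sample_rate=1.0)

with start_span(name="dogpark") as root_span:
    assert get_current_scope().span == root_span  # the root span sits on the scope
    with start_span(name="child") as child_span:
        assert get_current_scope().span == child_span  # the innermost span wins
        assert child_span.root_span == root_span  # and it still knows its root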
- assert scope._span is not None - assert isinstance(scope._span, Transaction) - assert scope._span.name == "dogpark" - - -def test_finds_transaction_when_descendent_span_is_on_scope( - sentry_init, -): - sentry_init(traces_sample_rate=1.0) - - transaction = start_transaction(name="dogpark") - child_span = transaction.start_child(op="sniffing") - - scope = sentry_sdk.get_current_scope() - scope._span = child_span - - # this is the same whether it's the transaction itself or one of its - # decedents directly attached to the scope - assert scope.transaction is not None - assert isinstance(scope.transaction, Transaction) - assert scope.transaction.name == "dogpark" - - # here we see that it is in fact the span on the scope, rather than the - # transaction itself - assert scope._span is not None - assert isinstance(scope._span, Span) - assert scope._span.op == "sniffing" - - -def test_finds_orphan_span_on_scope(sentry_init): - # this is deprecated behavior which may be removed at some point (along with - # the start_span function) - sentry_init(traces_sample_rate=1.0) - - span = start_span(op="sniffing") - - scope = sentry_sdk.get_current_scope() - scope._span = span - - assert scope._span is not None - assert isinstance(scope._span, Span) - assert scope._span.op == "sniffing" - - -def test_finds_non_orphan_span_on_scope(sentry_init): - sentry_init(traces_sample_rate=1.0) - - transaction = start_transaction(name="dogpark") - child_span = transaction.start_child(op="sniffing") - - scope = sentry_sdk.get_current_scope() - scope._span = child_span - - assert scope._span is not None - assert isinstance(scope._span, Span) - assert scope._span.op == "sniffing" - - -def test_circular_references(monkeypatch, sentry_init, request): - # TODO: We discovered while writing this test about transaction/span - # reference cycles that there's actually also a circular reference in - # `serializer.py`, between the functions `_serialize_node` and - # `_serialize_node_impl`, both of which are defined inside of the main - # `serialize` function, and each of which calls the other one. For now, in - # order to avoid having those ref cycles give us a false positive here, we - # can mock out `serialize`. In the long run, though, we should probably fix - # that. (Whenever we do work on fixing it, it may be useful to add - # - # gc.set_debug(gc.DEBUG_LEAK) - # request.addfinalizer(lambda: gc.set_debug(~gc.DEBUG_LEAK)) - # - # immediately after the initial collection below, so we can see what new - # objects the garbage collector has to clean up once `transaction.finish` is - # called and the serializer runs.) - monkeypatch.setattr( - sentry_sdk.client, - "serialize", - mock.Mock( - return_value=None, - ), - ) - - # In certain versions of python, in some environments (specifically, python - # 3.4 when run in GH Actions), we run into a `ctypes` bug which creates - # circular references when `uuid4()` is called, as happens when we're - # generating event ids. Mocking it with an implementation which doesn't use - # the `ctypes` function lets us avoid having false positives when garbage - # collecting. See https://bugs.python.org/issue20519. 
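This deletion and the earlier test_memory_usage removal in test_integration_tests.py retire the same gc-based leak check: hold only weak references to objects created inside spans, collect, and assert that the set drained. The pattern itself, as a sketch independent of any SDK internals:

import gc
import weakref

class Payload:
    pass  # stand-in for the per-span objects the deleted tests tracked

refs = weakref.WeakSet()

def work():
    for _ in range(100):
        obj = Payload()
        refs.add(obj)  # weak reference only; does not keep obj alive

work()
gc.collect()  # needed on PyPy; CPython frees unreferenced objects immediately
assert len(refs) == 0  # anything left here would indicate a reference cycle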
- monkeypatch.setattr( - uuid, - "uuid4", - mock.Mock( - return_value=uuid.UUID(bytes=os.urandom(16)), - ), - ) - - gc.disable() - request.addfinalizer(gc.enable) - - sentry_init(traces_sample_rate=1.0) - - # Make sure that we're starting with a clean slate before we start creating - # transaction/span reference cycles - gc.collect() - - dogpark_transaction = start_transaction(name="dogpark") - sniffing_span = dogpark_transaction.start_child(op="sniffing") - wagging_span = dogpark_transaction.start_child(op="wagging") - - # At some point, you have to stop sniffing - there are balls to chase! - so finish - # this span while the dogpark transaction is still open - sniffing_span.finish() - - # The wagging, however, continues long past the dogpark, so that span will - # NOT finish before the transaction ends. (Doing it in this order proves - # that both finished and unfinished spans get their cycles broken.) - dogpark_transaction.finish() - - # Eventually you gotta sleep... - wagging_span.finish() - - # assuming there are no cycles by this point, these should all be able to go - # out of scope and get their memory deallocated without the garbage - # collector having anything to do - del sniffing_span - del wagging_span - del dogpark_transaction - - assert gc.collect() == 0 - - -def test_set_meaurement(sentry_init, capture_events): - sentry_init(traces_sample_rate=1.0) - - events = capture_events() - - transaction = start_transaction(name="measuring stuff") - - with pytest.raises(TypeError): - transaction.set_measurement() - - with pytest.raises(TypeError): - transaction.set_measurement("metric.foo") + with start_span(name="dogpark") as root_span: + assert get_current_scope().span == root_span - transaction.set_measurement("metric.foo", 123) - transaction.set_measurement("metric.bar", 456, unit="second") - transaction.set_measurement("metric.baz", 420.69, unit="custom") - transaction.set_measurement("metric.foobar", 12, unit="percent") - transaction.set_measurement("metric.foobar", 17.99, unit="percent") - - transaction.finish() - - (event,) = events - assert event["measurements"]["metric.foo"] == {"value": 123, "unit": ""} - assert event["measurements"]["metric.bar"] == {"value": 456, "unit": "second"} - assert event["measurements"]["metric.baz"] == {"value": 420.69, "unit": "custom"} - assert event["measurements"]["metric.foobar"] == {"value": 17.99, "unit": "percent"} - - -def test_set_meaurement_public_api(sentry_init, capture_events): - sentry_init(traces_sample_rate=1.0) - - events = capture_events() - - with start_transaction(name="measuring stuff"): - set_measurement("metric.foo", 123) - set_measurement("metric.bar", 456, unit="second") - - (event,) = events - assert event["measurements"]["metric.foo"] == {"value": 123, "unit": ""} - assert event["measurements"]["metric.bar"] == {"value": 456, "unit": "second"} - - -def test_set_measurement_deprecated(sentry_init): - sentry_init(traces_sample_rate=1.0) - - with start_transaction(name="measuring stuff") as trx: - with pytest.warns(DeprecationWarning): - set_measurement("metric.foo", 123) - - with pytest.warns(DeprecationWarning): - trx.set_measurement("metric.bar", 456) - - with start_span(op="measuring span") as span: - with pytest.warns(DeprecationWarning): - span.set_measurement("metric.baz", 420.69, unit="custom") - - -def test_set_meaurement_compared_to_set_data(sentry_init, capture_events): - """ - This is just a test to see the difference - between measurements and data in the resulting event payload. 
- """ - sentry_init(traces_sample_rate=1.0) - - events = capture_events() - - with start_transaction(name="measuring stuff") as transaction: - transaction.set_measurement("metric.foo", 123) - transaction.set_data("metric.bar", 456) - - with start_span(op="measuring span") as span: - span.set_measurement("metric.baz", 420.69, unit="custom") - span.set_data("metric.qux", 789) - - (event,) = events - assert event["measurements"]["metric.foo"] == {"value": 123, "unit": ""} - assert event["contexts"]["trace"]["data"]["metric.bar"] == 456 - assert event["spans"][0]["measurements"]["metric.baz"] == { - "value": 420.69, - "unit": "custom", - } - assert event["spans"][0]["data"]["metric.qux"] == 789 + with start_span(name="child") as child_span: + assert get_current_scope().span == child_span + assert child_span.root_span == root_span @pytest.mark.parametrize( @@ -446,82 +196,17 @@ def test_should_propagate_trace_to_sentry( assert should_propagate_trace(client, url) == expected_propagation_decision -def test_start_transaction_updates_scope_name_source(sentry_init): - sentry_init(traces_sample_rate=1.0) - - scope = sentry_sdk.get_current_scope() - - with start_transaction(name="foobar", source="route"): - assert scope._transaction == "foobar" - assert scope._transaction_info == {"source": "route"} - - -@pytest.mark.parametrize("sampled", (True, None)) -def test_transaction_dropped_debug_not_started(sentry_init, sampled): - sentry_init(enable_tracing=True) - - tx = Transaction(sampled=sampled) - - with mock.patch("sentry_sdk.tracing.logger") as mock_logger: - with tx: - pass - - mock_logger.debug.assert_any_call( - "Discarding transaction because it was not started with sentry_sdk.start_transaction" - ) - - with pytest.raises(AssertionError): - # We should NOT see the "sampled = False" message here - mock_logger.debug.assert_any_call( - "Discarding transaction because sampled = False" - ) - - -def test_transaction_dropeed_sampled_false(sentry_init): - sentry_init(enable_tracing=True) - - tx = Transaction(sampled=False) - - with mock.patch("sentry_sdk.tracing.logger") as mock_logger: - with sentry_sdk.start_transaction(tx): - pass - - mock_logger.debug.assert_any_call("Discarding transaction because sampled = False") - - with pytest.raises(AssertionError): - # We should not see the "not started" message here - mock_logger.debug.assert_any_call( - "Discarding transaction because it was not started with sentry_sdk.start_transaction" - ) - - -def test_transaction_not_started_warning(sentry_init): - sentry_init(enable_tracing=True) - - tx = Transaction() - - with mock.patch("sentry_sdk.tracing.logger") as mock_logger: - with tx: - pass - - mock_logger.debug.assert_any_call( - "Transaction was entered without being started with sentry_sdk.start_transaction." - "The transaction will not be sent to Sentry. To fix, start the transaction by" - "passing it to sentry_sdk.start_transaction." 
- ) - - -def test_span_set_data_update_data(sentry_init, capture_events): +def test_span_set_attributes(sentry_init, capture_events): sentry_init(traces_sample_rate=1.0) events = capture_events() - with sentry_sdk.start_transaction(name="test-transaction"): - with start_span(op="test-span") as span: + with sentry_sdk.start_span(name="test-root-span"): + with start_span(op="test-span", name="test-span-name") as span: span.set_data("key0", "value0") span.set_data("key1", "value1") - span.update_data( + span.set_attributes( { "key1": "updated-value1", "key2": "value2", @@ -537,8 +222,12 @@ def test_span_set_data_update_data(sentry_init, capture_events): "key1": "updated-value1", "key2": "value2", "key3": "value3", - "thread.id": mock.ANY, - "thread.name": mock.ANY, + "sentry.name": "test-span-name", + "sentry.op": "test-span", + "sentry.origin": "manual", + "sentry.source": "custom", + "thread.id": ANY, + "thread.name": ANY, } @@ -547,7 +236,7 @@ def test_update_current_span(sentry_init, capture_events): events = capture_events() - with sentry_sdk.start_transaction(name="test-transaction"): + with sentry_sdk.start_span(name="test-root-span"): with start_span(op="test-span-op", name="test-span-name"): sentry_sdk.update_current_span( op="updated-span-op", @@ -582,6 +271,10 @@ def test_update_current_span(sentry_init, capture_events): "key0": "value0", "key1": "updated-value-4", "key2": "value2", - "thread.id": mock.ANY, - "thread.name": mock.ANY, + "sentry.name": "updated-span-name-3", + "sentry.op": "updated-span-op-2", + "sentry.origin": "manual", + "sentry.source": "custom", + "thread.id": ANY, + "thread.name": ANY, } diff --git a/tests/tracing/test_noop_span.py b/tests/tracing/test_noop_span.py deleted file mode 100644 index 36778cd485..0000000000 --- a/tests/tracing/test_noop_span.py +++ /dev/null @@ -1,52 +0,0 @@ -import sentry_sdk -from sentry_sdk.tracing import NoOpSpan - -# These tests make sure that the examples from the documentation [1] -# are working when OTel (OpenTelemetry) instrumentation is turned on, -# and therefore, the Sentry tracing should not do anything. 
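test_span_set_attributes above captures the attribute API this branch settles on: set_data and set_attribute write a single key, set_attributes merges a dict, and the stored payload also carries sentry.name / sentry.op / sentry.origin / sentry.source bookkeeping keys. As a sketch, assuming this branch's SDK:

import sentry_sdk
from sentry_sdk import start_span

sentry_sdk.init(traces_sample_rate=1.0)

with start_span(name="test-root-span"):
    with start_span(op="test-span", name="test-span-name") as span:
        span.set_data("key0", "value0")  # single key, data spelling
        span.set_attribute("key1", "value1")  # single key, attribute spelling
        span.set_attributes(  # bulk merge; overwrites key1
            {
                "key1": "updated-value1",
                "key2": "value2",
            }
        )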
-# -# 1: https://docs.sentry.io/platforms/python/performance/instrumentation/custom-instrumentation/ - - -def test_noop_start_transaction(sentry_init): - sentry_init(instrumenter="otel") - - with sentry_sdk.start_transaction( - op="task", name="test_transaction_name" - ) as transaction: - assert isinstance(transaction, NoOpSpan) - assert sentry_sdk.get_current_scope().span is transaction - - transaction.name = "new name" - - -def test_noop_start_span(sentry_init): - sentry_init(instrumenter="otel") - - with sentry_sdk.start_span(op="http", name="GET /") as span: - assert isinstance(span, NoOpSpan) - assert sentry_sdk.get_current_scope().span is span - - span.set_tag("http.response.status_code", 418) - span.set_data("http.entity_type", "teapot") - - -def test_noop_transaction_start_child(sentry_init): - sentry_init(instrumenter="otel") - - transaction = sentry_sdk.start_transaction(name="task") - assert isinstance(transaction, NoOpSpan) - - with transaction.start_child(op="child_task") as child: - assert isinstance(child, NoOpSpan) - assert sentry_sdk.get_current_scope().span is child - - -def test_noop_span_start_child(sentry_init): - sentry_init(instrumenter="otel") - span = sentry_sdk.start_span(name="task") - assert isinstance(span, NoOpSpan) - - with span.start_child(op="child_task") as child: - assert isinstance(child, NoOpSpan) - assert sentry_sdk.get_current_scope().span is child diff --git a/tests/tracing/test_propagation.py b/tests/tracing/test_propagation.py deleted file mode 100644 index 730bf2672b..0000000000 --- a/tests/tracing/test_propagation.py +++ /dev/null @@ -1,40 +0,0 @@ -import sentry_sdk -import pytest - - -def test_standalone_span_iter_headers(sentry_init): - sentry_init(enable_tracing=True) - - with sentry_sdk.start_span(op="test") as span: - with pytest.raises(StopIteration): - # We should not have any propagation headers - next(span.iter_headers()) - - -def test_span_in_span_iter_headers(sentry_init): - sentry_init(enable_tracing=True) - - with sentry_sdk.start_span(op="test"): - with sentry_sdk.start_span(op="test2") as span_inner: - with pytest.raises(StopIteration): - # We should not have any propagation headers - next(span_inner.iter_headers()) - - -def test_span_in_transaction(sentry_init): - sentry_init(enable_tracing=True) - - with sentry_sdk.start_transaction(op="test"): - with sentry_sdk.start_span(op="test2") as span: - # Ensure the headers are there - next(span.iter_headers()) - - -def test_span_in_span_in_transaction(sentry_init): - sentry_init(enable_tracing=True) - - with sentry_sdk.start_transaction(op="test"): - with sentry_sdk.start_span(op="test2"): - with sentry_sdk.start_span(op="test3") as span_inner: - # Ensure the headers are there - next(span_inner.iter_headers()) diff --git a/tests/tracing/test_sample_rand.py b/tests/tracing/test_sample_rand.py index f9c10aa04e..fe9f61716d 100644 --- a/tests/tracing/test_sample_rand.py +++ b/tests/tracing/test_sample_rand.py @@ -5,7 +5,7 @@ import pytest import sentry_sdk -from sentry_sdk.tracing_utils import Baggage +from sentry_sdk.tracing import BAGGAGE_HEADER_NAME, SENTRY_TRACE_HEADER_NAME @pytest.mark.parametrize("sample_rand", (0.0, 0.25, 0.5, 0.75)) @@ -22,9 +22,9 @@ def test_deterministic_sampled(sentry_init, capture_events, sample_rate, sample_ with mock.patch( "sentry_sdk.tracing_utils.Random.uniform", return_value=sample_rand ): - with sentry_sdk.start_transaction() as transaction: + with sentry_sdk.start_span() as root_span: assert ( - transaction.get_baggage().sentry_items["sample_rand"] + 
root_span.get_baggage().sentry_items["sample_rand"] == f"{sample_rand:.6f}" # noqa: E231 ) @@ -41,16 +41,20 @@ def test_transaction_uses_incoming_sample_rand( """ Test that the transaction uses the sample_rand value from the incoming baggage. """ - baggage = Baggage(sentry_items={"sample_rand": f"{sample_rand:.6f}"}) # noqa: E231 - sentry_init(traces_sample_rate=sample_rate) events = capture_events() - with sentry_sdk.start_transaction(baggage=baggage) as transaction: - assert ( - transaction.get_baggage().sentry_items["sample_rand"] - == f"{sample_rand:.6f}" # noqa: E231 - ) + baggage = f"sentry-sample_rand={sample_rand:.6f},sentry-trace_id=771a43a4192642f0b136d5159a501700" # noqa: E231 + sentry_trace = "771a43a4192642f0b136d5159a501700-1234567890abcdef" + + with sentry_sdk.continue_trace( + {BAGGAGE_HEADER_NAME: baggage, SENTRY_TRACE_HEADER_NAME: sentry_trace} + ): + with sentry_sdk.start_span() as root_span: + assert ( + root_span.get_baggage().sentry_items["sample_rand"] + == f"{sample_rand:.6f}" # noqa: E231 + ) # Transaction event captured if sample_rand < sample_rate, indicating that # sample_rand is used to make the sampling decision. @@ -77,13 +81,95 @@ def test_decimal_context(sentry_init, capture_events): with mock.patch( "sentry_sdk.tracing_utils.Random.uniform", return_value=0.123456789 ): - with sentry_sdk.start_transaction() as transaction: - assert ( - transaction.get_baggage().sentry_items["sample_rand"] == "0.123456" - ) + with sentry_sdk.start_span() as root_span: + assert root_span.get_baggage().sentry_items["sample_rand"] == "0.123456" finally: decimal.getcontext().prec = old_prec decimal.getcontext().traps[Inexact] = old_inexact decimal.getcontext().traps[FloatOperation] = old_float_operation assert len(events) == 1 + + +@pytest.mark.parametrize( + "incoming_sample_rand,expected_sample_rand", + ( + ("0.0100015", "0.0100015"), + ("0.1", "0.1"), + ), +) +def test_unexpected_incoming_sample_rand_precision( + sentry_init, capture_events, incoming_sample_rand, expected_sample_rand +): + """ + Test that incoming sample_rand is correctly interpreted even if it looks unexpected. + + We shouldn't be getting arbitrary precision sample_rand in incoming headers, + but if we do for some reason, check that we don't tamper with it. 
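+    (Values the SDK generates itself are quantized to six decimal places, as
+    test_decimal_context above asserts; incoming values are passed through
+    verbatim.)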
+ """ + sentry_init(traces_sample_rate=1.0) + events = capture_events() + + baggage = f"sentry-sample_rand={incoming_sample_rand},sentry-trace_id=771a43a4192642f0b136d5159a501700" # noqa: E231 + sentry_trace = "771a43a4192642f0b136d5159a501700-1234567890abcdef" + + with sentry_sdk.continue_trace( + {BAGGAGE_HEADER_NAME: baggage, SENTRY_TRACE_HEADER_NAME: sentry_trace} + ): + with sentry_sdk.start_span() as root_span: + assert ( + root_span.get_baggage().sentry_items["sample_rand"] + == expected_sample_rand + ) + + assert len(events) == 1 + + +@pytest.mark.parametrize( + "incoming_sample_rand", + ("abc", "null", "47"), +) +def test_invalid_incoming_sample_rand(sentry_init, incoming_sample_rand): + """Test that we handle malformed incoming sample_rand.""" + sentry_init(traces_sample_rate=1.0) + + baggage = f"sentry-sample_rand={incoming_sample_rand},sentry-trace_id=771a43a4192642f0b136d5159a501700" # noqa: E231 + sentry_trace = "771a43a4192642f0b136d5159a501700-1234567890abcdef" + + with sentry_sdk.continue_trace( + {BAGGAGE_HEADER_NAME: baggage, SENTRY_TRACE_HEADER_NAME: sentry_trace} + ): + with sentry_sdk.start_span(): + pass + + # The behavior here is undefined since we got a broken incoming trace, + # so as long as the SDK doesn't produce an error we consider this + # testcase a success. + + +@pytest.mark.parametrize("incoming", ((0.0, "true"), (1.0, "false"))) +def test_invalid_incoming_sampled_and_sample_rate(sentry_init, incoming): + """ + Test that we don't error out in case we can't generate a sample_rand that + would respect the incoming sampled and sample_rate. + """ + sentry_init(traces_sample_rate=1.0) + + sample_rate, sampled = incoming + + baggage = ( + f"sentry-sample_rate={sample_rate}," # noqa: E231 + f"sentry-sampled={sampled}," # noqa: E231 + "sentry-trace_id=771a43a4192642f0b136d5159a501700" + ) + sentry_trace = f"771a43a4192642f0b136d5159a501700-1234567890abcdef-{1 if sampled == 'true' else 0}" + + with sentry_sdk.continue_trace( + {BAGGAGE_HEADER_NAME: baggage, SENTRY_TRACE_HEADER_NAME: sentry_trace} + ): + with sentry_sdk.start_span(): + pass + + # The behavior here is undefined since we got a broken incoming trace, + # so as long as the SDK doesn't produce an error we consider this + # testcase a success. diff --git a/tests/tracing/test_sample_rand_propagation.py b/tests/tracing/test_sample_rand_propagation.py index ea3ea548ff..17bf7a6168 100644 --- a/tests/tracing/test_sample_rand_propagation.py +++ b/tests/tracing/test_sample_rand_propagation.py @@ -7,37 +7,38 @@ """ from unittest import mock -from unittest.mock import Mock import sentry_sdk -def test_continue_trace_with_sample_rand(): +def test_continue_trace_with_sample_rand(sentry_init): """ Test that an incoming sample_rand is propagated onto the transaction's baggage. """ + sentry_init() + headers = { - "sentry-trace": "00000000000000000000000000000000-0000000000000000-0", + "sentry-trace": "771a43a4192642f0b136d5159a501700-1234567890abcdef-0", "baggage": "sentry-sample_rand=0.1,sentry-sample_rate=0.5", } - transaction = sentry_sdk.continue_trace(headers) - assert transaction.get_baggage().sentry_items["sample_rand"] == "0.1" + with sentry_sdk.continue_trace(headers): + with sentry_sdk.start_span(name="root-span") as root_span: + assert root_span.get_baggage().sentry_items["sample_rand"] == "0.1" -def test_continue_trace_missing_sample_rand(): +def test_continue_trace_missing_sample_rand(sentry_init): """ Test that a missing sample_rand is filled in onto the transaction's baggage. 
""" + sentry_init() headers = { - "sentry-trace": "00000000000000000000000000000000-0000000000000000", + "sentry-trace": "771a43a4192642f0b136d5159a501700-1234567890abcdef", "baggage": "sentry-placeholder=asdf", } - mock_uniform = Mock(return_value=0.5) - - with mock.patch("sentry_sdk.tracing_utils.Random.uniform", mock_uniform): - transaction = sentry_sdk.continue_trace(headers) - - assert transaction.get_baggage().sentry_items["sample_rand"] == "0.500000" + with mock.patch("sentry_sdk.tracing_utils.Random.uniform", return_value=0.5): + with sentry_sdk.continue_trace(headers): + with sentry_sdk.start_span(name="root-span") as root_span: + assert root_span.get_baggage().sentry_items["sample_rand"] == "0.500000" diff --git a/tests/tracing/test_sampling.py b/tests/tracing/test_sampling.py index 1761a3dbac..4cda2f1283 100644 --- a/tests/tracing/test_sampling.py +++ b/tests/tracing/test_sampling.py @@ -5,36 +5,35 @@ import pytest import sentry_sdk -from sentry_sdk import start_span, start_transaction, capture_exception -from sentry_sdk.tracing import Transaction -from sentry_sdk.tracing_utils import Baggage +from sentry_sdk import start_span, capture_exception +from sentry_sdk.tracing import BAGGAGE_HEADER_NAME, SENTRY_TRACE_HEADER_NAME from sentry_sdk.utils import logger -def test_sampling_decided_only_for_transactions(sentry_init, capture_events): +def test_sampling_decided_only_for_root_spans(sentry_init): sentry_init(traces_sample_rate=0.5) - with start_transaction(name="hi") as transaction: - assert transaction.sampled is not None + with start_span(name="outer1") as root_span1: + assert root_span1.sampled is not None - with start_span() as span: - assert span.sampled == transaction.sampled + with start_span(name="inner") as span: + assert span.sampled == root_span1.sampled - with start_span() as span: - assert span.sampled is None + with start_span(name="outer2") as root_span2: + assert root_span2.sampled is not None @pytest.mark.parametrize("sampled", [True, False]) -def test_nested_transaction_sampling_override(sentry_init, sampled): +def test_nested_span_sampling_override(sentry_init, sampled): sentry_init(traces_sample_rate=1.0) - with start_transaction(name="outer", sampled=sampled) as outer_transaction: - assert outer_transaction.sampled is sampled - with start_transaction( - name="inner", sampled=(not sampled) - ) as inner_transaction: - assert inner_transaction.sampled is not sampled - assert outer_transaction.sampled is sampled + with start_span(name="outer", sampled=sampled) as outer_span: + assert outer_span.sampled is sampled + with start_span(name="inner", sampled=(not sampled)) as inner_span: + # won't work because the child span inherits the sampling decision + # from the parent + assert inner_span.sampled is sampled + assert outer_span.sampled is sampled def test_no_double_sampling(sentry_init, capture_events): @@ -43,26 +42,12 @@ def test_no_double_sampling(sentry_init, capture_events): sentry_init(traces_sample_rate=1.0, sample_rate=0.0) events = capture_events() - with start_transaction(name="/"): + with start_span(name="/"): pass assert len(events) == 1 -@pytest.mark.parametrize("sampling_decision", [True, False]) -def test_get_transaction_and_span_from_scope_regardless_of_sampling_decision( - sentry_init, sampling_decision -): - sentry_init(traces_sample_rate=1.0) - - with start_transaction(name="/", sampled=sampling_decision): - with start_span(op="child-span"): - with start_span(op="child-child-span"): - scope = sentry_sdk.get_current_scope() - assert scope.span.op 
== "child-child-span" - assert scope.transaction.name == "/" - - @pytest.mark.parametrize( "traces_sample_rate,expected_decision", [(0.0, False), (0.25, False), (0.75, True), (1.00, True)], @@ -74,9 +59,14 @@ def test_uses_traces_sample_rate_correctly( ): sentry_init(traces_sample_rate=traces_sample_rate) - baggage = Baggage(sentry_items={"sample_rand": "0.500000"}) - transaction = start_transaction(name="dogpark", baggage=baggage) - assert transaction.sampled is expected_decision + with sentry_sdk.continue_trace( + { + BAGGAGE_HEADER_NAME: "sentry-sample_rand=0.500000,sentry-trace_id=397f36434d07b20135324b2e6ae70c77", + SENTRY_TRACE_HEADER_NAME: "397f36434d07b20135324b2e6ae70c77-1234567890abcdef", + } + ): + with start_span(name="dogpark") as root_span: + assert root_span.sampled is expected_decision @pytest.mark.parametrize( @@ -90,9 +80,14 @@ def test_uses_traces_sampler_return_value_correctly( ): sentry_init(traces_sampler=mock.Mock(return_value=traces_sampler_return_value)) - baggage = Baggage(sentry_items={"sample_rand": "0.500000"}) - transaction = start_transaction(name="dogpark", baggage=baggage) - assert transaction.sampled is expected_decision + with sentry_sdk.continue_trace( + { + BAGGAGE_HEADER_NAME: "sentry-sample_rand=0.500000,sentry-trace_id=397f36434d07b20135324b2e6ae70c77", + SENTRY_TRACE_HEADER_NAME: "397f36434d07b20135324b2e6ae70c77-1234567890abcdef", + } + ): + with start_span(name="dogpark") as root_span: + assert root_span.sampled is expected_decision @pytest.mark.parametrize("traces_sampler_return_value", [True, False]) @@ -101,8 +96,8 @@ def test_tolerates_traces_sampler_returning_a_boolean( ): sentry_init(traces_sampler=mock.Mock(return_value=traces_sampler_return_value)) - transaction = start_transaction(name="dogpark") - assert transaction.sampled is traces_sampler_return_value + with start_span(name="dogpark") as span: + assert span.sampled is traces_sampler_return_value @pytest.mark.parametrize("sampling_decision", [True, False]) @@ -112,8 +107,8 @@ def test_only_captures_transaction_when_sampled_is_true( sentry_init(traces_sampler=mock.Mock(return_value=sampling_decision)) events = capture_events() - transaction = start_transaction(name="dogpark") - transaction.finish() + with start_span(name="dogpark"): + pass assert len(events) == (1 if sampling_decision else 0) @@ -134,9 +129,9 @@ def test_prefers_traces_sampler_to_traces_sample_rate( traces_sampler=traces_sampler, ) - transaction = start_transaction(name="dogpark") - assert traces_sampler.called is True - assert transaction.sampled is traces_sampler_return_value + with start_span(name="dogpark") as span: + assert traces_sampler.called is True + assert span.sampled is traces_sampler_return_value @pytest.mark.parametrize("parent_sampling_decision", [True, False]) @@ -148,10 +143,17 @@ def test_ignores_inherited_sample_decision_when_traces_sampler_defined( traces_sampler = mock.Mock(return_value=not parent_sampling_decision) sentry_init(traces_sampler=traces_sampler) - transaction = start_transaction( - name="dogpark", parent_sampled=parent_sampling_decision + sentry_trace_header = ( + "12312012123120121231201212312012-1121201211212012-{sampled}".format( + sampled=int(parent_sampling_decision) + ) ) - assert transaction.sampled is not parent_sampling_decision + + with sentry_sdk.continue_trace({"sentry-trace": sentry_trace_header}): + with sentry_sdk.start_span(name="dogpark") as span: + pass + + assert span.sampled is not parent_sampling_decision @pytest.mark.parametrize("explicit_decision", [True, False]) 
@@ -163,8 +165,8 @@ def test_traces_sampler_doesnt_overwrite_explicitly_passed_sampling_decision( traces_sampler = mock.Mock(return_value=not explicit_decision) sentry_init(traces_sampler=traces_sampler) - transaction = start_transaction(name="dogpark", sampled=explicit_decision) - assert transaction.sampled is explicit_decision + with start_span(name="dogpark", sampled=explicit_decision) as span: + assert span.sampled is explicit_decision @pytest.mark.parametrize("parent_sampling_decision", [True, False]) @@ -177,18 +179,26 @@ def test_inherits_parent_sampling_decision_when_traces_sampler_undefined( sentry_init(traces_sample_rate=0.5) mock_random_value = 0.25 if parent_sampling_decision is False else 0.75 - with mock.patch.object(random, "random", return_value=mock_random_value): - transaction = start_transaction( - name="dogpark", parent_sampled=parent_sampling_decision + sentry_trace_header = ( + "12312012123120121231201212312012-1121201211212012-{sampled}".format( + sampled=int(parent_sampling_decision) ) - assert transaction.sampled is parent_sampling_decision + ) + with mock.patch.object(random, "random", return_value=mock_random_value): + with sentry_sdk.continue_trace({"sentry-trace": sentry_trace_header}): + with start_span(name="dogpark") as span: + assert span.sampled is parent_sampling_decision @pytest.mark.parametrize("parent_sampling_decision", [True, False]) def test_passes_parent_sampling_decision_in_sampling_context( sentry_init, parent_sampling_decision ): - sentry_init(traces_sample_rate=1.0) + def dummy_traces_sampler(sampling_context): + assert sampling_context["parent_sampled"] is parent_sampling_decision + return 1.0 + + sentry_init(traces_sample_rate=1.0, traces_sampler=dummy_traces_sampler) sentry_trace_header = ( "12312012123120121231201212312012-1121201211212012-{sampled}".format( @@ -196,32 +206,26 @@ def test_passes_parent_sampling_decision_in_sampling_context( ) ) - transaction = Transaction.continue_from_headers( - headers={"sentry-trace": sentry_trace_header}, name="dogpark" - ) - - def mock_set_initial_sampling_decision(_, sampling_context): - assert "parent_sampled" in sampling_context - assert sampling_context["parent_sampled"] is parent_sampling_decision + with sentry_sdk.continue_trace({"sentry-trace": sentry_trace_header}): + with sentry_sdk.start_span(name="dogpark"): + pass - with mock.patch( - "sentry_sdk.tracing.Transaction._set_initial_sampling_decision", - mock_set_initial_sampling_decision, - ): - start_transaction(transaction=transaction) +def test_passes_custom_attributes_in_sampling_context(sentry_init): + def traces_sampler(sampling_context): + assert sampling_context["dog.name"] == "Lily" + assert sampling_context["dog.activity"] == "fetch" + return 1.0 -def test_passes_custom_sampling_context_from_start_transaction_to_traces_sampler( - sentry_init, DictionaryContaining # noqa: N803 -): - traces_sampler = mock.Mock() sentry_init(traces_sampler=traces_sampler) - start_transaction(custom_sampling_context={"dogs": "yes", "cats": "maybe"}) - - traces_sampler.assert_any_call( - DictionaryContaining({"dogs": "yes", "cats": "maybe"}) - ) + with sentry_sdk.continue_trace( + {"sentry-trace": "12312012123120121231201212312012-1121201211212012-1"} + ): + with sentry_sdk.start_span( + name="dogpark", attributes={"dog.name": "Lily", "dog.activity": "fetch"} + ): + pass def test_sample_rate_affects_errors(sentry_init, capture_events): @@ -256,9 +260,11 @@ def test_warns_and_sets_sampled_to_false_on_invalid_traces_sampler_return_value( 
sentry_init(traces_sampler=mock.Mock(return_value=traces_sampler_return_value)) with mock.patch.object(logger, "warning", mock.Mock()): - transaction = start_transaction(name="dogpark") - logger.warning.assert_any_call(StringContaining("Given sample rate is invalid")) - assert transaction.sampled is False + with start_span(name="dogpark") as span: + logger.warning.assert_any_call( + StringContaining("Given sample rate is invalid") + ) + assert span.sampled is False @pytest.mark.parametrize( @@ -283,9 +289,8 @@ def test_records_lost_event_only_if_traces_sample_rate_enabled( sentry_init(traces_sample_rate=traces_sample_rate) record_lost_event_calls = capture_record_lost_event_calls() - transaction = start_transaction(name="dogpark") - assert transaction.sampled is sampled_output - transaction.finish() + with start_span(name="dogpark") as span: + assert span.sampled is sampled_output # Use Counter because order of calls does not matter assert Counter(record_lost_event_calls) == Counter(expected_record_lost_event_calls) @@ -310,12 +315,52 @@ def test_records_lost_event_only_if_traces_sampler_enabled( sampled_output, expected_record_lost_event_calls, ): - sentry_init(traces_sampler=traces_sampler) + sentry_init( + traces_sample_rate=None, + traces_sampler=traces_sampler, + ) record_lost_event_calls = capture_record_lost_event_calls() - transaction = start_transaction(name="dogpark") - assert transaction.sampled is sampled_output - transaction.finish() + with start_span(name="dogpark") as span: + assert span.sampled is sampled_output # Use Counter because order of calls does not matter assert Counter(record_lost_event_calls) == Counter(expected_record_lost_event_calls) + + +@pytest.mark.parametrize("parent_sampling_decision", [True, False]) +def test_profiles_sampler_gets_sampling_context(sentry_init, parent_sampling_decision): + def dummy_profiles_sampler(sampling_context): + assert sampling_context["transaction_context"] == { + "name": "dogpark", + "op": "op", + "source": "custom", + } + assert sampling_context["parent_sampled"] == parent_sampling_decision + return 1.0 + + sentry_init(traces_sample_rate=1.0, profiles_sampler=dummy_profiles_sampler) + + sentry_trace = "12312012123120121231201212312012-1121201211212012-{}".format( + int(parent_sampling_decision) + ) + with sentry_sdk.continue_trace({"sentry-trace": sentry_trace}): + with sentry_sdk.start_span(name="dogpark", op="op"): + pass + + +def test_passes_custom_attributes_in_profiles_sampling_context(sentry_init): + def profiles_sampler(sampling_context): + assert sampling_context["dog.name"] == "Lily" + assert sampling_context["dog.activity"] == "fetch" + return 1.0 + + sentry_init(traces_sample_rate=1.0, profiles_sampler=profiles_sampler) + + with sentry_sdk.continue_trace( + {"sentry-trace": "12312012123120121231201212312012-1121201211212012-1"} + ): + with sentry_sdk.start_span( + name="dogpark", attributes={"dog.name": "Lily", "dog.activity": "fetch"} + ): + pass diff --git a/tests/tracing/test_span_name.py b/tests/tracing/test_span_name.py index 9c1768990a..d7d3772727 100644 --- a/tests/tracing/test_span_name.py +++ b/tests/tracing/test_span_name.py @@ -1,27 +1,11 @@ -import pytest - import sentry_sdk -def test_start_span_description(sentry_init, capture_events): - sentry_init(traces_sample_rate=1.0) - events = capture_events() - - with sentry_sdk.start_transaction(name="hi"): - with pytest.deprecated_call(): - with sentry_sdk.start_span(op="foo", description="span-desc"): - ... 
- - (event,) = events - - assert event["spans"][0]["description"] == "span-desc" - - def test_start_span_name(sentry_init, capture_events): sentry_init(traces_sample_rate=1.0) events = capture_events() - with sentry_sdk.start_transaction(name="hi"): + with sentry_sdk.start_span(name="hi"): with sentry_sdk.start_span(op="foo", name="span-name"): ... @@ -30,26 +14,11 @@ def test_start_span_name(sentry_init, capture_events): assert event["spans"][0]["description"] == "span-name" -def test_start_child_description(sentry_init, capture_events): - sentry_init(traces_sample_rate=1.0) - events = capture_events() - - with sentry_sdk.start_transaction(name="hi"): - with pytest.deprecated_call(): - with sentry_sdk.start_span(op="foo", description="span-desc") as span: - with span.start_child(op="bar", description="child-desc"): - ... - - (event,) = events - - assert event["spans"][-1]["description"] == "child-desc" - - def test_start_child_name(sentry_init, capture_events): sentry_init(traces_sample_rate=1.0) events = capture_events() - with sentry_sdk.start_transaction(name="hi"): + with sentry_sdk.start_span(name="hi"): with sentry_sdk.start_span(op="foo", name="span-name") as span: with span.start_child(op="bar", name="child-name"): ... diff --git a/tests/tracing/test_span_origin.py b/tests/tracing/test_span_origin.py index 16635871b3..38be1b37d9 100644 --- a/tests/tracing/test_span_origin.py +++ b/tests/tracing/test_span_origin.py @@ -1,11 +1,12 @@ -from sentry_sdk import start_transaction, start_span +import pytest +from sentry_sdk import start_span def test_span_origin_manual(sentry_init, capture_events): sentry_init(traces_sample_rate=1.0) events = capture_events() - with start_transaction(name="hi"): + with start_span(name="hi"): with start_span(op="foo", name="bar"): pass @@ -20,11 +21,11 @@ def test_span_origin_custom(sentry_init, capture_events): sentry_init(traces_sample_rate=1.0) events = capture_events() - with start_transaction(name="hi"): + with start_span(name="hi"): with start_span(op="foo", name="bar", origin="foo.foo2.foo3"): pass - with start_transaction(name="ho", origin="ho.ho2.ho3"): + with start_span(name="ho", origin="ho.ho2.ho3"): with start_span(op="baz", name="qux", origin="baz.baz2.baz3"): pass @@ -36,3 +37,118 @@ def test_span_origin_custom(sentry_init, capture_events): assert second_transaction["contexts"]["trace"]["origin"] == "ho.ho2.ho3" assert second_transaction["spans"][0]["origin"] == "baz.baz2.baz3" + + +@pytest.mark.parametrize("excluded_origins", [None, [], "noop"]) +def test_exclude_span_origins_empty(sentry_init, capture_events, excluded_origins): + if excluded_origins in (None, []): + sentry_init(traces_sample_rate=1.0, exclude_span_origins=excluded_origins) + elif excluded_origins == "noop": + sentry_init( + traces_sample_rate=1.0, + # default is None + ) + + events = capture_events() + + with start_span(name="span1"): + pass + with start_span(name="span2", origin="auto.http.requests"): + pass + with start_span(name="span3", origin="auto.db.postgres"): + pass + + assert len(events) == 3 + + +@pytest.mark.parametrize( + "excluded_origins,origins,expected_allowed_origins", + [ + # Regexes + ( + [r"auto\.http\..*", r"auto\.db\..*"], + [ + "auto.http.requests", + "auto.db.sqlite", + "manual", + ], + ["manual"], + ), + # Substring matching + ( + ["http"], + [ + "auto.http.requests", + "http.client", + "my.http.integration", + "manual", + "auto.db.postgres", + ], + ["manual", "auto.db.postgres"], + ), + # Mix and match + ( + ["manual", r"auto\.http\..*", "db"], + 
[ + "manual", + "auto.http.requests", + "auto.db.postgres", + "auto.grpc.server", + ], + ["auto.grpc.server"], + ), + ], +) +def test_exclude_span_origins_patterns( + sentry_init, + capture_events, + excluded_origins, + origins, + expected_allowed_origins, +): + sentry_init( + traces_sample_rate=1.0, + exclude_span_origins=excluded_origins, + ) + + events = capture_events() + + for origin in origins: + with start_span(name="span", origin=origin): + pass + + assert len(events) == len(expected_allowed_origins) + + if len(expected_allowed_origins) > 0: + captured_origins = {event["contexts"]["trace"]["origin"] for event in events} + assert captured_origins == set(expected_allowed_origins) + + +def test_exclude_span_origins_with_child_spans(sentry_init, capture_events): + sentry_init(traces_sample_rate=1.0, exclude_span_origins=[r"auto\.http\..*"]) + events = capture_events() + + with start_span(name="parent", origin="manual"): + with start_span(name="http-child", origin="auto.http.requests"): + pass + with start_span(name="db-child", origin="auto.db.postgres"): + pass + + assert len(events) == 1 + assert events[0]["contexts"]["trace"]["origin"] == "manual" + assert len(events[0]["spans"]) == 1 + assert events[0]["spans"][0]["origin"] == "auto.db.postgres" + + +def test_exclude_span_origins_parent_with_child_spans(sentry_init, capture_events): + sentry_init(traces_sample_rate=1.0, exclude_span_origins=[r"auto\.http\..*"]) + events = capture_events() + + with start_span(name="parent", origin="auto.http.requests"): + with start_span( + name="db-child", origin="auto.db.postgres", only_as_child_span=True + ): + # Note: without only_as_child_span, the child span would be promoted to a transaction + pass + + assert len(events) == 0 diff --git a/tests/tracing/test_span_tags.py b/tests/tracing/test_span_tags.py new file mode 100644 index 0000000000..3803149aa6 --- /dev/null +++ b/tests/tracing/test_span_tags.py @@ -0,0 +1,78 @@ +import pytest +import sentry_sdk + + +@pytest.mark.parametrize( + ("key", "value", "expected"), + [ + ("int", 123, "123"), + ("float", 123.456, "123.456"), + ("bool_true", True, "True"), + ("bool_false", False, "False"), + ("list", [1, 2, 3], "(1, 2, 3)"), + ("dict", {"key": "value"}, '{"key": "value"}'), + ("already_string", "test", "test"), + ], +) +def test_span_set_tag_converts_to_string( + sentry_init, capture_events, key, value, expected +): + """Test that Span.set_tag converts various types to strings.""" + sentry_init(traces_sample_rate=1.0) + events = capture_events() + + with sentry_sdk.start_span() as span: + span.set_tag(key, value) + + (event,) = events + + assert event["tags"][key] == expected, f"Tag {key} was not converted properly" + + +def test_span_set_tag_handles_conversion_failure(sentry_init, capture_events): + """Test that Span.set_tag handles objects that fail to convert to string, + but have a valid __repr__.""" + sentry_init(traces_sample_rate=1.0) + events = capture_events() + + # Create an object that raises an exception when str() is called. + # This needs to be a subclass of something that otel supports as an + # attribute value (e.g. int). 
+    class BadObject(int):
+        def __str__(self):
+            raise NotImplementedError("Cannot convert to string")
+
+        def __repr__(self):
+            return "BadObject()"
+
+    with sentry_sdk.start_span() as span:
+        span.set_tag("bad_object", BadObject())
+
+    (event,) = events
+
+    assert event["tags"]["bad_object"] == "BadObject()"
+
+
+def test_span_set_tag_handles_broken_repr(sentry_init, capture_events):
+    """Test that Span.set_tag handles objects with broken __str__ and __repr__."""
+    sentry_init(traces_sample_rate=1.0)
+    events = capture_events()
+
+    # Create an object that raises exceptions for both __str__ and __repr__
+    class BadObject(int):
+        def __str__(self):
+            raise NotImplementedError("Cannot convert to string")
+
+        def __repr__(self):
+            raise NotImplementedError("Cannot get representation")
+
+    bad_obj = BadObject()
+
+    # This should not raise an exception
+    with sentry_sdk.start_span() as span:
+        span.set_tag("bad_object", bad_obj)
+
+    # The tag should be set to a fallback value
+    (event,) = events
+
+    assert event["tags"]["bad_object"] == ""
diff --git a/tests/tracing/test_trace_propagation.py b/tests/tracing/test_trace_propagation.py
new file mode 100644
index 0000000000..7fff8a8caa
--- /dev/null
+++ b/tests/tracing/test_trace_propagation.py
@@ -0,0 +1,292 @@
+import pytest
+import requests
+import sentry_sdk
+from http.client import HTTPConnection
+
+from tests.conftest import create_mock_http_server
+
+USE_DEFAULT_TRACES_SAMPLE_RATE = -1
+
+INCOMING_TRACE_ID = "771a43a4192642f0b136d5159a501700"
+INCOMING_HEADERS = {
+    "sentry-trace": f"{INCOMING_TRACE_ID}-1234567890abcdef",
+    "baggage": (
+        f"sentry-trace_id={INCOMING_TRACE_ID}, "
+        "sentry-public_key=frontendpublickey,"
+        "sentry-sample_rate=0.01337,"
+        "sentry-release=myfrontend,"
+        "sentry-environment=bird,"
+        "sentry-transaction=bar"
+    ),
+}
+
+PORT = create_mock_http_server()
+
+
+# High-level tests for trace propagation, covering the matrix of test cases
+# described here:
+# https://develop.sentry.dev/sdk/telemetry/traces/trace-propagation-cheat-sheet/
+
+
+@pytest.fixture
+def _mock_putheader(monkeypatch):
+    """
+    Mock HTTPConnection.putheader to capture calls to it.
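+
+    Returns the list of (header, value) tuples recorded for all headers
+    written to outgoing HTTP requests during the test.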
+ """ + putheader_calls = [] + original_putheader = HTTPConnection.putheader + + def mock_putheader_fn(self, header, value): + putheader_calls.append((header, value)) + return original_putheader(self, header, value) + + monkeypatch.setattr(HTTPConnection, "putheader", mock_putheader_fn) + return putheader_calls + + +@pytest.mark.parametrize( + "traces_sample_rate", + [ + USE_DEFAULT_TRACES_SAMPLE_RATE, + None, + 0, + 1, + ], + ids=[ + "traces_sample_rate=DEFAULT", + "traces_sample_rate=None", + "traces_sample_rate=0", + "traces_sample_rate=1", + ], +) +def test_no_incoming_trace_and_trace_propagation_targets_matching( + sentry_init, capture_events, _mock_putheader, traces_sample_rate +): + init_kwargs = {} + if traces_sample_rate != USE_DEFAULT_TRACES_SAMPLE_RATE: + init_kwargs["traces_sample_rate"] = traces_sample_rate + sentry_init(**init_kwargs) + + events = capture_events() + + NO_INCOMING_HEADERS = {} # noqa: N806 + + with sentry_sdk.continue_trace(NO_INCOMING_HEADERS): + with sentry_sdk.start_span(op="test", name="test"): + requests.get(f"http://localhost:{PORT}") # noqa:E231 + + # CHECK if performance data (a transaction/span) is sent to Sentry + if traces_sample_rate == 1: + assert len(events) == 1 + else: + assert len(events) == 0 + + outgoing_request_headers = {key: value for key, value in _mock_putheader} + + # CHECK if trace information is added to the outgoing request + assert "sentry-trace" in outgoing_request_headers + assert "baggage" in outgoing_request_headers + + # CHECK if incoming trace is continued + # (no assert necessary, because there is no incoming trace information) + + +@pytest.mark.parametrize( + "traces_sample_rate", + [ + USE_DEFAULT_TRACES_SAMPLE_RATE, + None, + 0, + 1, + ], + ids=[ + "traces_sample_rate=DEFAULT", + "traces_sample_rate=None", + "traces_sample_rate=0", + "traces_sample_rate=1", + ], +) +def test_no_incoming_trace_and_trace_propagation_targets_not_matching( + sentry_init, capture_events, _mock_putheader, traces_sample_rate +): + init_kwargs = { + "trace_propagation_targets": [ + "http://someothersite.com", + ], + } + if traces_sample_rate != USE_DEFAULT_TRACES_SAMPLE_RATE: + init_kwargs["traces_sample_rate"] = traces_sample_rate + sentry_init(**init_kwargs) + + events = capture_events() + + NO_INCOMING_HEADERS = {} # noqa: N806 + + with sentry_sdk.continue_trace(NO_INCOMING_HEADERS): + with sentry_sdk.start_span(op="test", name="test"): + requests.get(f"http://localhost:{PORT}") # noqa:E231 + + # CHECK if performance data (a transaction/span) is sent to Sentry + if traces_sample_rate == 1: + assert len(events) == 1 + else: + assert len(events) == 0 + + outgoing_request_headers = {key: value for key, value in _mock_putheader} + + # CHECK if trace information is added to the outgoing request + assert "sentry-trace" not in outgoing_request_headers + assert "baggage" not in outgoing_request_headers + + # CHECK if incoming trace is continued + # (no assert necessary, because there is no incoming trace information, and no outgoing trace information either) + + +@pytest.mark.parametrize( + "traces_sample_rate", + [ + USE_DEFAULT_TRACES_SAMPLE_RATE, + None, + 0, + 1, + ], + ids=[ + "traces_sample_rate=DEFAULT", + "traces_sample_rate=None", + "traces_sample_rate=0", + "traces_sample_rate=1", + ], +) +@pytest.mark.parametrize( + "incoming_parent_sampled", + ["deferred", "1", "0"], + ids=[ + "incoming_parent_sampled=DEFERRED", + "incoming_parent_sampled=1", + "incoming_parent_sampled=0", + ], +) +def 
test_with_incoming_trace_and_trace_propagation_targets_matching( + sentry_init, + capture_events, + _mock_putheader, + incoming_parent_sampled, + traces_sample_rate, +): + init_kwargs = {} + if traces_sample_rate != USE_DEFAULT_TRACES_SAMPLE_RATE: + init_kwargs["traces_sample_rate"] = traces_sample_rate + sentry_init(**init_kwargs) + + events = capture_events() + + incoming_headers = INCOMING_HEADERS.copy() + if incoming_parent_sampled != "deferred": + incoming_headers["sentry-trace"] += f"-{incoming_parent_sampled}" + incoming_headers[ + "baggage" + ] += f',sentry-sampled={"true" if incoming_parent_sampled == "1" else "false"}' # noqa: E231 + + with sentry_sdk.continue_trace(incoming_headers): + with sentry_sdk.start_span(op="test", name="test"): + requests.get(f"http://localhost:{PORT}") # noqa:E231 + + # CHECK if performance data (a transaction/span) is sent to Sentry + if ( + traces_sample_rate is None + or traces_sample_rate == USE_DEFAULT_TRACES_SAMPLE_RATE + or incoming_parent_sampled == "0" + ): + assert len(events) == 0 + else: + if incoming_parent_sampled == "1" or traces_sample_rate == 1: + assert len(events) == 1 + else: + assert len(events) == 0 + + outgoing_request_headers = {key: value for key, value in _mock_putheader} + + # CHECK if trace information is added to the outgoing request + assert "sentry-trace" in outgoing_request_headers + assert "baggage" in outgoing_request_headers + + # CHECK if incoming trace is continued + # Always continue the incoming trace, no matter traces_sample_rate + assert INCOMING_TRACE_ID in outgoing_request_headers["sentry-trace"] + assert INCOMING_TRACE_ID in outgoing_request_headers["baggage"] + + +@pytest.mark.parametrize( + "traces_sample_rate", + [ + USE_DEFAULT_TRACES_SAMPLE_RATE, + None, + 0, + 1, + ], + ids=[ + "traces_sample_rate=DEFAULT", + "traces_sample_rate=None", + "traces_sample_rate=0", + "traces_sample_rate=1", + ], +) +@pytest.mark.parametrize( + "incoming_parent_sampled", + ["deferred", "1", "0"], + ids=[ + "incoming_parent_sampled=DEFERRED", + "incoming_parent_sampled=1", + "incoming_parent_sampled=0", + ], +) +def test_with_incoming_trace_and_trace_propagation_targets_not_matching( + sentry_init, + capture_events, + _mock_putheader, + incoming_parent_sampled, + traces_sample_rate, +): + init_kwargs = { + "trace_propagation_targets": [ + "http://someothersite.com", + ], + } + if traces_sample_rate != USE_DEFAULT_TRACES_SAMPLE_RATE: + init_kwargs["traces_sample_rate"] = traces_sample_rate + sentry_init(**init_kwargs) + + events = capture_events() + + incoming_headers = INCOMING_HEADERS.copy() + if incoming_parent_sampled != "deferred": + incoming_headers["sentry-trace"] += f"-{incoming_parent_sampled}" + incoming_headers[ + "baggage" + ] += f',sentry-sampled={"true" if incoming_parent_sampled == "1" else "false"}' # noqa: E231 + + with sentry_sdk.continue_trace(incoming_headers): + with sentry_sdk.start_span(op="test", name="test"): + requests.get(f"http://localhost:{PORT}") # noqa:E231 + + # CHECK if performance data (a transaction/span) is sent to Sentry + if ( + traces_sample_rate is None + or traces_sample_rate == USE_DEFAULT_TRACES_SAMPLE_RATE + or incoming_parent_sampled == "0" + ): + assert len(events) == 0 + else: + if incoming_parent_sampled == "1" or traces_sample_rate == 1: + assert len(events) == 1 + else: + assert len(events) == 0 + + outgoing_request_headers = {key: value for key, value in _mock_putheader} + + # CHECK if trace information is added to the outgoing request + assert "sentry-trace" not in 
outgoing_request_headers + assert "baggage" not in outgoing_request_headers + + # CHECK if incoming trace is continued + # (no assert necessary, because the trace information is not added to the outgoing request (see previous asserts)) diff --git a/tox.ini b/tox.ini index 16067de8c7..ac35660ccb 100644 --- a/tox.ini +++ b/tox.ini @@ -10,18 +10,15 @@ # The file (and all resulting CI YAMLs) then need to be regenerated via # "scripts/generate-test-files.sh". # -# Last generated: 2025-07-29T06:07:22.069934+00:00 +# Last generated: 2025-07-30T08:02:45.313432+00:00 [tox] requires = # This version introduced using pip 24.1 which does not work with older Celery and HTTPX versions. virtualenv<20.26.3 envlist = - # === Common === - {py3.6,py3.7,py3.8,py3.9,py3.10,py3.11,py3.12,py3.13}-common - # === Gevent === - {py3.6,py3.8,py3.10,py3.11,py3.12}-gevent + {py3.8,py3.10,py3.11,py3.12}-gevent # === Integrations === # General format is {pythonversion}-{integrationname}-v{frameworkversion} @@ -55,24 +52,24 @@ envlist = {py3.8,py3.11}-beam-latest # Boto3 - {py3.6,py3.7}-boto3-v{1.12} + {py3.7}-boto3-v{1.12} {py3.7,py3.11,py3.12}-boto3-v{1.23} {py3.11,py3.12}-boto3-v{1.34} {py3.11,py3.12,py3.13}-boto3-latest # Chalice - {py3.6,py3.9}-chalice-v{1.16} + {py3.7,py3.9}-chalice-v{1.16} {py3.8,py3.12,py3.13}-chalice-latest # Cloud Resource Context - {py3.6,py3.12,py3.13}-cloud_resource_context + {py3.7,py3.12,py3.13}-cloud_resource_context # GCP {py3.7}-gcp # HTTPX - {py3.6,py3.9}-httpx-v{0.16,0.18} - {py3.6,py3.10}-httpx-v{0.20,0.22} + {py3.7,py3.9}-httpx-v{0.16,0.18} + {py3.7,py3.10}-httpx-v{0.20,0.22} {py3.7,py3.11,py3.12}-httpx-v{0.23,0.24} {py3.9,py3.11,py3.12}-httpx-v{0.25,0.27} {py3.9,py3.12,py3.13}-httpx-latest @@ -90,14 +87,8 @@ envlist = {py3.9,py3.11,py3.12}-openai-latest {py3.9,py3.11,py3.12}-openai-notiktoken - # OpenTelemetry (OTel) - {py3.7,py3.9,py3.12,py3.13}-opentelemetry - - # OpenTelemetry Experimental (POTel) - {py3.8,py3.9,py3.10,py3.11,py3.12,py3.13}-potel - # pure_eval - {py3.6,py3.12,py3.13}-pure_eval + {py3.7,py3.12,py3.13}-pure_eval # Quart {py3.7,py3.11}-quart-v{0.16} @@ -109,24 +100,22 @@ envlist = {py3.10,py3.11}-ray-latest # Redis - {py3.6,py3.8}-redis-v{3} + {py3.7,py3.8}-redis-v{3} {py3.7,py3.8,py3.11}-redis-v{4} {py3.7,py3.11,py3.12}-redis-v{5} - {py3.7,py3.12,py3.13}-redis-latest # Requests - {py3.6,py3.8,py3.12,py3.13}-requests + {py3.7,py3.8,py3.12,py3.13}-requests # RQ (Redis Queue) - {py3.6}-rq-v{0.6} - {py3.6,py3.9}-rq-v{0.13,1.0} - {py3.6,py3.11}-rq-v{1.5,1.10} + {py3.7,py3.9}-rq-v{0.13,1.0} + {py3.7,py3.11}-rq-v{1.5,1.10} {py3.7,py3.11,py3.12}-rq-v{1.15,1.16} {py3.7,py3.12,py3.13}-rq-latest # Sanic - {py3.6,py3.7}-sanic-v{0.8} - {py3.6,py3.8}-sanic-v{20} + {py3.7}-sanic-v{0.8} + {py3.8}-sanic-v{20} {py3.8,py3.11,py3.12}-sanic-v{24.6} {py3.9,py3.12,py3.13}-sanic-latest @@ -134,6 +123,13 @@ envlist = # These come from the populate_tox.py script. Eventually we should move all # integration tests there. 
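+    # The hand-maintained "common" and OpenTelemetry environments above were
+    # replaced by these generated ones, which run the common test suite
+    # against several opentelemetry-sdk releases (see the deps section below).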
+ # ~~~ Common ~~~ + {py3.7,py3.8,py3.9}-common-v1.4.1 + {py3.7,py3.8,py3.9,py3.10,py3.11}-common-v1.15.0 + {py3.8,py3.9,py3.10,py3.11,py3.12}-common-v1.26.0 + {py3.9,py3.10,py3.11,py3.12,py3.13}-common-v1.36.0 + + # ~~~ AI ~~~ {py3.8,py3.11,py3.12}-anthropic-v0.16.0 {py3.8,py3.11,py3.12}-anthropic-v0.31.2 @@ -147,30 +143,29 @@ envlist = {py3.10,py3.11,py3.12}-openai_agents-v0.0.19 {py3.10,py3.12,py3.13}-openai_agents-v0.1.0 - {py3.10,py3.12,py3.13}-openai_agents-v0.2.3 + {py3.10,py3.12,py3.13}-openai_agents-v0.2.4 {py3.8,py3.10,py3.11}-huggingface_hub-v0.22.2 {py3.8,py3.11,py3.12}-huggingface_hub-v0.26.5 {py3.8,py3.12,py3.13}-huggingface_hub-v0.30.2 - {py3.8,py3.12,py3.13}-huggingface_hub-v0.34.2 + {py3.8,py3.12,py3.13}-huggingface_hub-v0.34.3 {py3.8,py3.12,py3.13}-huggingface_hub-v0.35.0rc0 # ~~~ DBs ~~~ {py3.7,py3.11,py3.12}-clickhouse_driver-v0.2.9 - {py3.6}-pymongo-v3.5.1 - {py3.6,py3.10,py3.11}-pymongo-v3.13.0 - {py3.6,py3.9,py3.10}-pymongo-v4.0.2 + {py3.7}-pymongo-v3.7.2 + {py3.7,py3.10,py3.11}-pymongo-v3.13.0 + {py3.7,py3.9,py3.10}-pymongo-v4.0.2 {py3.9,py3.12,py3.13}-pymongo-v4.13.2 - {py3.6}-redis_py_cluster_legacy-v1.3.6 - {py3.6,py3.7}-redis_py_cluster_legacy-v2.0.0 - {py3.6,py3.7,py3.8}-redis_py_cluster_legacy-v2.1.3 + {py3.7}-redis_py_cluster_legacy-v2.0.0 + {py3.7,py3.8}-redis_py_cluster_legacy-v2.1.3 - {py3.6,py3.8,py3.9}-sqlalchemy-v1.3.24 - {py3.6,py3.11,py3.12}-sqlalchemy-v1.4.54 - {py3.7,py3.12,py3.13}-sqlalchemy-v2.0.41 + {py3.7,py3.8,py3.9}-sqlalchemy-v1.3.24 + {py3.7,py3.11,py3.12}-sqlalchemy-v1.4.54 + {py3.7,py3.12,py3.13}-sqlalchemy-v2.0.42 # ~~~ Flags ~~~ @@ -199,11 +194,11 @@ envlist = {py3.8,py3.11,py3.12}-ariadne-v0.24.0 {py3.9,py3.12,py3.13}-ariadne-v0.26.2 - {py3.6,py3.9,py3.10}-gql-v3.4.1 + {py3.7,py3.9,py3.10}-gql-v3.4.1 {py3.7,py3.11,py3.12}-gql-v3.5.3 {py3.9,py3.12,py3.13}-gql-v4.0.0b0 - {py3.6,py3.9,py3.10}-graphene-v3.3 + {py3.7,py3.9,py3.10}-graphene-v3.3 {py3.8,py3.12,py3.13}-graphene-v3.4.3 {py3.8,py3.10,py3.11}-strawberry-v0.209.8 @@ -220,19 +215,19 @@ envlist = # ~~~ Tasks ~~~ - {py3.6,py3.7,py3.8}-celery-v4.4.7 - {py3.6,py3.7,py3.8}-celery-v5.0.5 + {py3.8}-celery-v4.4.7 + {py3.8}-celery-v5.0.5 {py3.8,py3.12,py3.13}-celery-v5.5.3 - {py3.6,py3.7}-dramatiq-v1.9.0 - {py3.6,py3.8,py3.9}-dramatiq-v1.12.3 + {py3.7}-dramatiq-v1.9.0 + {py3.7,py3.8,py3.9}-dramatiq-v1.12.3 {py3.7,py3.10,py3.11}-dramatiq-v1.15.0 {py3.9,py3.12,py3.13}-dramatiq-v1.18.0 - {py3.6,py3.7}-huey-v2.1.3 - {py3.6,py3.7}-huey-v2.2.0 - {py3.6,py3.7}-huey-v2.3.2 - {py3.6,py3.11,py3.12}-huey-v2.5.3 + {py3.7}-huey-v2.1.3 + {py3.7}-huey-v2.2.0 + {py3.7}-huey-v2.3.2 + {py3.7,py3.11,py3.12}-huey-v2.5.3 {py3.8,py3.9}-spark-v3.0.3 {py3.8,py3.10,py3.11}-spark-v3.5.6 @@ -240,24 +235,24 @@ envlist = # ~~~ Web 1 ~~~ - {py3.6,py3.7}-django-v1.11.29 - {py3.6,py3.8,py3.9}-django-v2.2.28 - {py3.6,py3.9,py3.10}-django-v3.2.25 + {py3.7}-django-v2.0.13 + {py3.7,py3.8,py3.9}-django-v2.2.28 + {py3.7,py3.9,py3.10}-django-v3.2.25 {py3.8,py3.11,py3.12}-django-v4.2.23 {py3.10,py3.11,py3.12}-django-v5.0.14 {py3.10,py3.12,py3.13}-django-v5.2.4 - {py3.6,py3.7,py3.8}-flask-v1.1.4 + {py3.7,py3.8}-flask-v1.1.4 {py3.8,py3.12,py3.13}-flask-v2.3.3 {py3.8,py3.12,py3.13}-flask-v3.0.3 {py3.9,py3.12,py3.13}-flask-v3.1.1 - {py3.6,py3.9,py3.10}-starlette-v0.16.0 + {py3.7,py3.9,py3.10}-starlette-v0.16.0 {py3.7,py3.10,py3.11}-starlette-v0.26.1 {py3.8,py3.11,py3.12}-starlette-v0.36.3 {py3.9,py3.12,py3.13}-starlette-v0.47.2 - {py3.6,py3.9,py3.10}-fastapi-v0.79.1 + {py3.7,py3.9,py3.10}-fastapi-v0.79.1 
{py3.7,py3.10,py3.11}-fastapi-v0.91.0 {py3.7,py3.10,py3.11}-fastapi-v0.103.2 {py3.8,py3.12,py3.13}-fastapi-v0.116.1 @@ -269,12 +264,11 @@ envlist = {py3.8,py3.12,py3.13}-aiohttp-v3.10.11 {py3.9,py3.12,py3.13}-aiohttp-v3.12.15 - {py3.6,py3.7}-bottle-v0.12.25 + {py3.7}-bottle-v0.12.25 {py3.8,py3.12,py3.13}-bottle-v0.13.4 - {py3.6}-falcon-v1.4.1 - {py3.6,py3.7}-falcon-v2.0.0 - {py3.6,py3.11,py3.12}-falcon-v3.1.3 + {py3.7,py3.8,py3.9}-falcon-v3.0.1 + {py3.7,py3.11,py3.12}-falcon-v3.1.3 {py3.8,py3.11,py3.12}-falcon-v4.0.2 {py3.8,py3.11,py3.12}-falcon-v4.1.0a3 @@ -283,27 +277,25 @@ envlist = {py3.8,py3.11,py3.12}-litestar-v2.10.0 {py3.8,py3.12,py3.13}-litestar-v2.16.0 - {py3.6}-pyramid-v1.8.6 - {py3.6,py3.8,py3.9}-pyramid-v1.10.8 - {py3.6,py3.10,py3.11}-pyramid-v2.0.2 + {py3.7,py3.8,py3.9}-pyramid-v1.10.8 + {py3.7,py3.10,py3.11}-pyramid-v2.0.2 {py3.8,py3.10,py3.11}-starlite-v1.48.1 {py3.8,py3.10,py3.11}-starlite-v1.49.0 {py3.8,py3.10,py3.11}-starlite-v1.50.2 {py3.8,py3.10,py3.11}-starlite-v1.51.16 - {py3.6,py3.7,py3.8}-tornado-v6.0.4 + {py3.7,py3.8}-tornado-v6.0.4 {py3.7,py3.9,py3.10}-tornado-v6.2 {py3.8,py3.10,py3.11}-tornado-v6.4.2 {py3.9,py3.12,py3.13}-tornado-v6.5.1 # ~~~ Misc ~~~ - {py3.6,py3.12,py3.13}-loguru-v0.7.3 + {py3.7,py3.12,py3.13}-loguru-v0.7.3 - {py3.6}-trytond-v4.6.22 - {py3.6}-trytond-v4.8.18 - {py3.6,py3.7,py3.8}-trytond-v5.8.16 + {py3.7}-trytond-v5.0.63 + {py3.7,py3.8}-trytond-v5.8.16 {py3.8,py3.10,py3.11}-trytond-v6.8.17 {py3.8,py3.11,py3.12}-trytond-v7.0.34 {py3.9,py3.12,py3.13}-trytond-v7.6.4 @@ -323,22 +315,13 @@ deps = linters: -r requirements-linting.txt linters: werkzeug<2.3.0 - # === Common === - py3.8-common: hypothesis - common: pytest-asyncio - # See https://github.com/pytest-dev/pytest/issues/9621 - # and https://github.com/pytest-dev/pytest-forked/issues/67 - # for justification of the upper bound on pytest - {py3.6,py3.7}-common: pytest<7.0.0 - {py3.8,py3.9,py3.10,py3.11,py3.12,py3.13}-common: pytest - # === Gevent === - {py3.6,py3.7,py3.8,py3.9,py3.10,py3.11}-gevent: gevent>=22.10.0, <22.11.0 + {py3.7,py3.8,py3.9,py3.10,py3.11}-gevent: gevent>=22.10.0, <22.11.0 {py3.12}-gevent: gevent # See https://github.com/pytest-dev/pytest/issues/9621 # and https://github.com/pytest-dev/pytest-forked/issues/67 # for justification of the upper bound on pytest - {py3.6,py3.7}-gevent: pytest<7.0.0 + py3.7-gevent: pytest<7.0.0 {py3.8,py3.9,py3.10,py3.11,py3.12}-gevent: pytest gevent: pytest-asyncio {py3.10,py3.11}-gevent: zope.event<5.0.0 @@ -434,12 +417,6 @@ deps = openai-latest: tiktoken~=0.6.0 openai-notiktoken: openai - # OpenTelemetry (OTel) - opentelemetry: opentelemetry-distro - - # OpenTelemetry Experimental (POTel) - potel: -e .[opentelemetry-experimental] - # pure_eval pure_eval: pure_eval @@ -464,25 +441,22 @@ deps = # Redis redis: fakeredis!=1.7.4 redis: pytest<8.0.0 - {py3.6,py3.7,py3.8}-redis: fakeredis<2.26.0 + {py3.7,py3.8}-redis: fakeredis<2.26.0 {py3.7,py3.8,py3.9,py3.10,py3.11,py3.12,py3.13}-redis: pytest-asyncio redis-v3: redis~=3.0 redis-v4: redis~=4.0 redis-v5: redis~=5.0 - redis-latest: redis # Requests requests: requests>=2.0 # RQ (Redis Queue) # https://github.com/jamesls/fakeredis/issues/245 - rq-v{0.6}: fakeredis<1.0 - rq-v{0.6}: redis<3.2.2 rq-v{0.13,1.0,1.5,1.10}: fakeredis>=1.0,<1.7.4 rq-v{1.15,1.16}: fakeredis<2.28.0 - {py3.6,py3.7}-rq-v{1.15,1.16}: fakeredis!=2.26.0 # https://github.com/cunla/fakeredis-py/issues/341 + py3.7-rq-v{1.15,1.16}: fakeredis!=2.26.0 # https://github.com/cunla/fakeredis-py/issues/341 rq-latest: fakeredis<2.28.0 - 
{py3.6,py3.7}-rq-latest: fakeredis!=2.26.0 # https://github.com/cunla/fakeredis-py/issues/341 + py3.7-rq-latest: fakeredis!=2.26.0 # https://github.com/cunla/fakeredis-py/issues/341 rq-v0.6: rq~=0.6.0 rq-v0.13: rq~=0.13.0 rq-v1.0: rq~=1.0.0 @@ -508,6 +482,17 @@ deps = # These come from the populate_tox.py script. Eventually we should move all # integration tests there. + # ~~~ Common ~~~ + common-v1.4.1: opentelemetry-sdk==1.4.1 + common-v1.15.0: opentelemetry-sdk==1.15.0 + common-v1.26.0: opentelemetry-sdk==1.26.0 + common-v1.36.0: opentelemetry-sdk==1.36.0 + common: pytest + common: pytest-asyncio + py3.7-common: pytest<7.0.0 + py3.8-common: hypothesis + + # ~~~ AI ~~~ anthropic-v0.16.0: anthropic==0.16.0 anthropic-v0.31.2: anthropic==0.31.2 @@ -525,32 +510,31 @@ deps = openai_agents-v0.0.19: openai-agents==0.0.19 openai_agents-v0.1.0: openai-agents==0.1.0 - openai_agents-v0.2.3: openai-agents==0.2.3 + openai_agents-v0.2.4: openai-agents==0.2.4 openai_agents: pytest-asyncio huggingface_hub-v0.22.2: huggingface_hub==0.22.2 huggingface_hub-v0.26.5: huggingface_hub==0.26.5 huggingface_hub-v0.30.2: huggingface_hub==0.30.2 - huggingface_hub-v0.34.2: huggingface_hub==0.34.2 + huggingface_hub-v0.34.3: huggingface_hub==0.34.3 huggingface_hub-v0.35.0rc0: huggingface_hub==0.35.0rc0 # ~~~ DBs ~~~ clickhouse_driver-v0.2.9: clickhouse-driver==0.2.9 - pymongo-v3.5.1: pymongo==3.5.1 + pymongo-v3.7.2: pymongo==3.7.2 pymongo-v3.13.0: pymongo==3.13.0 pymongo-v4.0.2: pymongo==4.0.2 pymongo-v4.13.2: pymongo==4.13.2 pymongo: mockupdb - redis_py_cluster_legacy-v1.3.6: redis-py-cluster==1.3.6 redis_py_cluster_legacy-v2.0.0: redis-py-cluster==2.0.0 redis_py_cluster_legacy-v2.1.3: redis-py-cluster==2.1.3 sqlalchemy-v1.3.24: sqlalchemy==1.3.24 sqlalchemy-v1.4.54: sqlalchemy==1.4.54 - sqlalchemy-v2.0.41: sqlalchemy==2.0.41 + sqlalchemy-v2.0.42: sqlalchemy==2.0.42 # ~~~ Flags ~~~ @@ -622,7 +606,6 @@ deps = celery-v5.5.3: celery==5.5.3 celery: newrelic celery: redis - py3.7-celery: importlib-metadata<5.0 dramatiq-v1.9.0: dramatiq==1.9.0 dramatiq-v1.12.3: dramatiq==1.12.3 @@ -640,16 +623,18 @@ deps = # ~~~ Web 1 ~~~ - django-v1.11.29: django==1.11.29 + django-v2.0.13: django==2.0.13 django-v2.2.28: django==2.2.28 django-v3.2.25: django==3.2.25 django-v4.2.23: django==4.2.23 django-v5.0.14: django==5.0.14 django-v5.2.4: django==5.2.4 + django: channels[daphne] django: psycopg2-binary django: djangorestframework django: pytest-django django: Werkzeug + django-v2.0.13: channels[daphne] django-v2.2.28: channels[daphne] django-v3.2.25: channels[daphne] django-v4.2.23: channels[daphne] @@ -660,13 +645,13 @@ deps = django-v4.2.23: pytest-asyncio django-v5.0.14: pytest-asyncio django-v5.2.4: pytest-asyncio - django-v1.11.29: djangorestframework>=3.0,<4.0 - django-v1.11.29: Werkzeug<2.1.0 + django-v2.0.13: djangorestframework>=3.0,<4.0 + django-v2.0.13: Werkzeug<2.1.0 django-v2.2.28: djangorestframework>=3.0,<4.0 django-v2.2.28: Werkzeug<2.1.0 django-v3.2.25: djangorestframework>=3.0,<4.0 django-v3.2.25: Werkzeug<2.1.0 - django-v1.11.29: pytest-django<4.0 + django-v2.0.13: pytest-django<4.0 django-v2.2.28: pytest-django<4.0 flask-v1.1.4: flask==1.1.4 @@ -721,8 +706,7 @@ deps = bottle-v0.13.4: bottle==0.13.4 bottle: werkzeug<2.1.0 - falcon-v1.4.1: falcon==1.4.1 - falcon-v2.0.0: falcon==2.0.0 + falcon-v3.0.1: falcon==3.0.1 falcon-v3.1.3: falcon==3.1.3 falcon-v4.0.2: falcon==4.0.2 falcon-v4.1.0a3: falcon==4.1.0a3 @@ -738,7 +722,6 @@ deps = litestar-v2.0.1: httpx<0.28 litestar-v2.5.5: httpx<0.28 - pyramid-v1.8.6: pyramid==1.8.6 
pyramid-v1.10.8: pyramid==1.10.8 pyramid-v2.0.2: pyramid==2.0.2 pyramid: werkzeug<2.1.0 @@ -767,15 +750,13 @@ deps = # ~~~ Misc ~~~ loguru-v0.7.3: loguru==0.7.3 - trytond-v4.6.22: trytond==4.6.22 - trytond-v4.8.18: trytond==4.8.18 + trytond-v5.0.63: trytond==5.0.63 trytond-v5.8.16: trytond==5.8.16 trytond-v6.8.17: trytond==6.8.17 trytond-v7.0.34: trytond==7.0.34 trytond-v7.6.4: trytond==7.6.4 trytond: werkzeug - trytond-v4.6.22: werkzeug<1.0 - trytond-v4.8.18: werkzeug<1.0 + trytond-v5.0.63: werkzeug<1.0 typer-v0.15.4: typer==0.15.4 typer-v0.16.0: typer==0.16.0 @@ -786,9 +767,9 @@ setenv = PYTHONDONTWRITEBYTECODE=1 OBJC_DISABLE_INITIALIZE_FORK_SAFETY=YES COVERAGE_FILE=.coverage-sentry-{envname} - py3.6: COVERAGE_RCFILE=.coveragerc36 django: DJANGO_SETTINGS_MODULE=tests.integrations.django.myapp.settings + py3.12-django: PIP_CONSTRAINT=constraints.txt spark-v{3.0.3,3.5.6}: JAVA_HOME=/usr/lib/jvm/temurin-11-jdk-amd64 common: TESTPATH=tests @@ -827,8 +808,6 @@ setenv = openai: TESTPATH=tests/integrations/openai openai_agents: TESTPATH=tests/integrations/openai_agents openfeature: TESTPATH=tests/integrations/openfeature - opentelemetry: TESTPATH=tests/integrations/opentelemetry - potel: TESTPATH=tests/integrations/opentelemetry pure_eval: TESTPATH=tests/integrations/pure_eval pymongo: TESTPATH=tests/integrations/pymongo pyramid: TESTPATH=tests/integrations/pyramid @@ -866,7 +845,6 @@ extras = pymongo: pymongo basepython = - py3.6: python3.6 py3.7: python3.7 py3.8: python3.8 py3.9: python3.9