diff --git a/.coveragerc36 b/.coveragerc36 deleted file mode 100644 index 8642882ab1..0000000000 --- a/.coveragerc36 +++ /dev/null @@ -1,14 +0,0 @@ -# This is the coverage.py config for Python 3.6 -# The config for newer Python versions is in pyproject.toml. - -[run] -branch = true -omit = - /tmp/* - */tests/* - */.venv/* - - -[report] -exclude_lines = - if TYPE_CHECKING: diff --git a/.github/workflows/test-integrations-ai.yml b/.github/workflows/test-integrations-ai.yml index bc89cb9afe..fc7d551249 100644 --- a/.github/workflows/test-integrations-ai.yml +++ b/.github/workflows/test-integrations-ai.yml @@ -29,18 +29,11 @@ jobs: strategy: fail-fast: false matrix: - python-version: ["3.9","3.11","3.12"] - # python3.6 reached EOL and is no longer being supported on - # new versions of hosted runners on Github Actions - # ubuntu-20.04 is the last version that supported python3.6 - # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877 + python-version: ["3.7","3.9","3.11","3.12"] os: [ubuntu-22.04] - # Use Docker container only for Python 3.6 - container: ${{ matrix.python-version == '3.6' && 'python:3.6' || null }} steps: - uses: actions/checkout@v4.2.2 - uses: actions/setup-python@v5 - if: ${{ matrix.python-version != '3.6' }} with: python-version: ${{ matrix.python-version }} allow-prereleases: true @@ -70,14 +63,8 @@ jobs: run: | set -x # print commands that are executed ./scripts/runtox.sh "py${{ matrix.python-version }}-huggingface_hub-latest" - - name: Generate coverage XML (Python 3.6) - if: ${{ !cancelled() && matrix.python-version == '3.6' }} - run: | - export COVERAGE_RCFILE=.coveragerc36 - coverage combine .coverage-sentry-* - coverage xml --ignore-errors - name: Generate coverage XML - if: ${{ !cancelled() && matrix.python-version != '3.6' }} + if: ${{ !cancelled() }} run: | coverage combine .coverage-sentry-* coverage xml @@ -105,17 +92,10 @@ jobs: fail-fast: false matrix: python-version: ["3.8","3.9","3.10","3.11","3.12","3.13"] - # python3.6 reached EOL and is no longer being supported on - # new versions of hosted runners on Github Actions - # ubuntu-20.04 is the last version that supported python3.6 - # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877 os: [ubuntu-22.04] - # Use Docker container only for Python 3.6 - container: ${{ matrix.python-version == '3.6' && 'python:3.6' || null }} steps: - uses: actions/checkout@v4.2.2 - uses: actions/setup-python@v5 - if: ${{ matrix.python-version != '3.6' }} with: python-version: ${{ matrix.python-version }} allow-prereleases: true @@ -145,14 +125,8 @@ jobs: run: | set -x # print commands that are executed ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-huggingface_hub" - - name: Generate coverage XML (Python 3.6) - if: ${{ !cancelled() && matrix.python-version == '3.6' }} - run: | - export COVERAGE_RCFILE=.coveragerc36 - coverage combine .coverage-sentry-* - coverage xml --ignore-errors - name: Generate coverage XML - if: ${{ !cancelled() && matrix.python-version != '3.6' }} + if: ${{ !cancelled() }} run: | coverage combine .coverage-sentry-* coverage xml diff --git a/.github/workflows/test-integrations-cloud.yml b/.github/workflows/test-integrations-cloud.yml index 7763aa509d..341e531e31 100644 --- a/.github/workflows/test-integrations-cloud.yml +++ b/.github/workflows/test-integrations-cloud.yml @@ -30,21 +30,14 @@ jobs: fail-fast: false matrix: python-version: ["3.8","3.11","3.12","3.13"] - # python3.6 reached EOL and is no longer being supported on - # 
new versions of hosted runners on Github Actions - # ubuntu-20.04 is the last version that supported python3.6 - # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877 os: [ubuntu-22.04] services: docker: image: docker:dind # Required for Docker network management options: --privileged # Required for Docker-in-Docker operations - # Use Docker container only for Python 3.6 - container: ${{ matrix.python-version == '3.6' && 'python:3.6' || null }} steps: - uses: actions/checkout@v4.2.2 - uses: actions/setup-python@v5 - if: ${{ matrix.python-version != '3.6' }} with: python-version: ${{ matrix.python-version }} allow-prereleases: true @@ -74,14 +67,8 @@ jobs: run: | set -x # print commands that are executed ./scripts/runtox.sh "py${{ matrix.python-version }}-gcp-latest" - - name: Generate coverage XML (Python 3.6) - if: ${{ !cancelled() && matrix.python-version == '3.6' }} - run: | - export COVERAGE_RCFILE=.coveragerc36 - coverage combine .coverage-sentry-* - coverage xml --ignore-errors - name: Generate coverage XML - if: ${{ !cancelled() && matrix.python-version != '3.6' }} + if: ${{ !cancelled() }} run: | coverage combine .coverage-sentry-* coverage xml @@ -108,22 +95,15 @@ jobs: strategy: fail-fast: false matrix: - python-version: ["3.6","3.7","3.8","3.9","3.11","3.12","3.13"] - # python3.6 reached EOL and is no longer being supported on - # new versions of hosted runners on Github Actions - # ubuntu-20.04 is the last version that supported python3.6 - # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877 + python-version: ["3.7","3.8","3.9","3.11","3.12","3.13"] os: [ubuntu-22.04] services: docker: image: docker:dind # Required for Docker network management options: --privileged # Required for Docker-in-Docker operations - # Use Docker container only for Python 3.6 - container: ${{ matrix.python-version == '3.6' && 'python:3.6' || null }} steps: - uses: actions/checkout@v4.2.2 - uses: actions/setup-python@v5 - if: ${{ matrix.python-version != '3.6' }} with: python-version: ${{ matrix.python-version }} allow-prereleases: true @@ -153,14 +133,8 @@ jobs: run: | set -x # print commands that are executed ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-gcp" - - name: Generate coverage XML (Python 3.6) - if: ${{ !cancelled() && matrix.python-version == '3.6' }} - run: | - export COVERAGE_RCFILE=.coveragerc36 - coverage combine .coverage-sentry-* - coverage xml --ignore-errors - name: Generate coverage XML - if: ${{ !cancelled() && matrix.python-version != '3.6' }} + if: ${{ !cancelled() }} run: | coverage combine .coverage-sentry-* coverage xml diff --git a/.github/workflows/test-integrations-common.yml b/.github/workflows/test-integrations-common.yml index 864583532d..59524f2d93 100644 --- a/.github/workflows/test-integrations-common.yml +++ b/.github/workflows/test-integrations-common.yml @@ -29,18 +29,11 @@ jobs: strategy: fail-fast: false matrix: - python-version: ["3.6","3.7","3.8","3.9","3.10","3.11","3.12","3.13"] - # python3.6 reached EOL and is no longer being supported on - # new versions of hosted runners on Github Actions - # ubuntu-20.04 is the last version that supported python3.6 - # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877 + python-version: ["3.7","3.8","3.9","3.10","3.11","3.12","3.13"] os: [ubuntu-22.04] - # Use Docker container only for Python 3.6 - container: ${{ matrix.python-version == '3.6' && 'python:3.6' || null }} steps: - uses: actions/checkout@v4.2.2 - 
uses: actions/setup-python@v5 - if: ${{ matrix.python-version != '3.6' }} with: python-version: ${{ matrix.python-version }} allow-prereleases: true @@ -54,14 +47,8 @@ jobs: run: | set -x # print commands that are executed ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-common" - - name: Generate coverage XML (Python 3.6) - if: ${{ !cancelled() && matrix.python-version == '3.6' }} - run: | - export COVERAGE_RCFILE=.coveragerc36 - coverage combine .coverage-sentry-* - coverage xml --ignore-errors - name: Generate coverage XML - if: ${{ !cancelled() && matrix.python-version != '3.6' }} + if: ${{ !cancelled() }} run: | coverage combine .coverage-sentry-* coverage xml diff --git a/.github/workflows/test-integrations-dbs.yml b/.github/workflows/test-integrations-dbs.yml index 815b550027..390d579094 100644 --- a/.github/workflows/test-integrations-dbs.yml +++ b/.github/workflows/test-integrations-dbs.yml @@ -30,10 +30,6 @@ jobs: fail-fast: false matrix: python-version: ["3.7","3.8","3.11","3.12","3.13"] - # python3.6 reached EOL and is no longer being supported on - # new versions of hosted runners on Github Actions - # ubuntu-20.04 is the last version that supported python3.6 - # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877 os: [ubuntu-22.04] services: postgres: @@ -50,15 +46,12 @@ jobs: ports: - 5432:5432 env: - SENTRY_PYTHON_TEST_POSTGRES_HOST: ${{ matrix.python-version == '3.6' && 'postgres' || 'localhost' }} + SENTRY_PYTHON_TEST_POSTGRES_HOST: localhost SENTRY_PYTHON_TEST_POSTGRES_USER: postgres SENTRY_PYTHON_TEST_POSTGRES_PASSWORD: sentry - # Use Docker container only for Python 3.6 - container: ${{ matrix.python-version == '3.6' && 'python:3.6' || null }} steps: - uses: actions/checkout@v4.2.2 - uses: actions/setup-python@v5 - if: ${{ matrix.python-version != '3.6' }} with: python-version: ${{ matrix.python-version }} allow-prereleases: true @@ -94,14 +87,8 @@ jobs: run: | set -x # print commands that are executed ./scripts/runtox.sh "py${{ matrix.python-version }}-sqlalchemy-latest" - - name: Generate coverage XML (Python 3.6) - if: ${{ !cancelled() && matrix.python-version == '3.6' }} - run: | - export COVERAGE_RCFILE=.coveragerc36 - coverage combine .coverage-sentry-* - coverage xml --ignore-errors - name: Generate coverage XML - if: ${{ !cancelled() && matrix.python-version != '3.6' }} + if: ${{ !cancelled() }} run: | coverage combine .coverage-sentry-* coverage xml @@ -128,11 +115,7 @@ jobs: strategy: fail-fast: false matrix: - python-version: ["3.6","3.7","3.8","3.9","3.10","3.11","3.12","3.13"] - # python3.6 reached EOL and is no longer being supported on - # new versions of hosted runners on Github Actions - # ubuntu-20.04 is the last version that supported python3.6 - # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877 + python-version: ["3.7","3.8","3.9","3.10","3.11","3.12","3.13"] os: [ubuntu-22.04] services: postgres: @@ -149,15 +132,12 @@ jobs: ports: - 5432:5432 env: - SENTRY_PYTHON_TEST_POSTGRES_HOST: ${{ matrix.python-version == '3.6' && 'postgres' || 'localhost' }} + SENTRY_PYTHON_TEST_POSTGRES_HOST: localhost SENTRY_PYTHON_TEST_POSTGRES_USER: postgres SENTRY_PYTHON_TEST_POSTGRES_PASSWORD: sentry - # Use Docker container only for Python 3.6 - container: ${{ matrix.python-version == '3.6' && 'python:3.6' || null }} steps: - uses: actions/checkout@v4.2.2 - uses: actions/setup-python@v5 - if: ${{ matrix.python-version != '3.6' }} with: python-version: ${{ matrix.python-version }} 
allow-prereleases: true @@ -193,14 +173,8 @@ jobs: run: | set -x # print commands that are executed ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-sqlalchemy" - - name: Generate coverage XML (Python 3.6) - if: ${{ !cancelled() && matrix.python-version == '3.6' }} - run: | - export COVERAGE_RCFILE=.coveragerc36 - coverage combine .coverage-sentry-* - coverage xml --ignore-errors - name: Generate coverage XML - if: ${{ !cancelled() && matrix.python-version != '3.6' }} + if: ${{ !cancelled() }} run: | coverage combine .coverage-sentry-* coverage xml diff --git a/.github/workflows/test-integrations-flags.yml b/.github/workflows/test-integrations-flags.yml index e28067841b..ffcef6e799 100644 --- a/.github/workflows/test-integrations-flags.yml +++ b/.github/workflows/test-integrations-flags.yml @@ -30,17 +30,10 @@ jobs: fail-fast: false matrix: python-version: ["3.7","3.8","3.9","3.12","3.13"] - # python3.6 reached EOL and is no longer being supported on - # new versions of hosted runners on Github Actions - # ubuntu-20.04 is the last version that supported python3.6 - # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877 os: [ubuntu-22.04] - # Use Docker container only for Python 3.6 - container: ${{ matrix.python-version == '3.6' && 'python:3.6' || null }} steps: - uses: actions/checkout@v4.2.2 - uses: actions/setup-python@v5 - if: ${{ matrix.python-version != '3.6' }} with: python-version: ${{ matrix.python-version }} allow-prereleases: true @@ -66,14 +59,8 @@ jobs: run: | set -x # print commands that are executed ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-unleash" - - name: Generate coverage XML (Python 3.6) - if: ${{ !cancelled() && matrix.python-version == '3.6' }} - run: | - export COVERAGE_RCFILE=.coveragerc36 - coverage combine .coverage-sentry-* - coverage xml --ignore-errors - name: Generate coverage XML - if: ${{ !cancelled() && matrix.python-version != '3.6' }} + if: ${{ !cancelled() }} run: | coverage combine .coverage-sentry-* coverage xml diff --git a/.github/workflows/test-integrations-gevent.yml b/.github/workflows/test-integrations-gevent.yml index 41a77ffe34..76c70d8ac7 100644 --- a/.github/workflows/test-integrations-gevent.yml +++ b/.github/workflows/test-integrations-gevent.yml @@ -29,18 +29,11 @@ jobs: strategy: fail-fast: false matrix: - python-version: ["3.6","3.8","3.10","3.11","3.12"] - # python3.6 reached EOL and is no longer being supported on - # new versions of hosted runners on Github Actions - # ubuntu-20.04 is the last version that supported python3.6 - # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877 + python-version: ["3.8","3.10","3.11","3.12"] os: [ubuntu-22.04] - # Use Docker container only for Python 3.6 - container: ${{ matrix.python-version == '3.6' && 'python:3.6' || null }} steps: - uses: actions/checkout@v4.2.2 - uses: actions/setup-python@v5 - if: ${{ matrix.python-version != '3.6' }} with: python-version: ${{ matrix.python-version }} allow-prereleases: true @@ -54,14 +47,8 @@ jobs: run: | set -x # print commands that are executed ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-gevent" - - name: Generate coverage XML (Python 3.6) - if: ${{ !cancelled() && matrix.python-version == '3.6' }} - run: | - export COVERAGE_RCFILE=.coveragerc36 - coverage combine .coverage-sentry-* - coverage xml --ignore-errors - name: Generate coverage XML - if: ${{ !cancelled() && matrix.python-version != '3.6' }} + if: ${{ !cancelled() }} run: | 
coverage combine .coverage-sentry-* coverage xml diff --git a/.github/workflows/test-integrations-graphql.yml b/.github/workflows/test-integrations-graphql.yml index b741302de6..02ccf1804c 100644 --- a/.github/workflows/test-integrations-graphql.yml +++ b/.github/workflows/test-integrations-graphql.yml @@ -29,18 +29,11 @@ jobs: strategy: fail-fast: false matrix: - python-version: ["3.6","3.7","3.8","3.9","3.10","3.11","3.12","3.13"] - # python3.6 reached EOL and is no longer being supported on - # new versions of hosted runners on Github Actions - # ubuntu-20.04 is the last version that supported python3.6 - # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877 + python-version: ["3.7","3.8","3.9","3.10","3.11","3.12","3.13"] os: [ubuntu-22.04] - # Use Docker container only for Python 3.6 - container: ${{ matrix.python-version == '3.6' && 'python:3.6' || null }} steps: - uses: actions/checkout@v4.2.2 - uses: actions/setup-python@v5 - if: ${{ matrix.python-version != '3.6' }} with: python-version: ${{ matrix.python-version }} allow-prereleases: true @@ -66,14 +59,8 @@ jobs: run: | set -x # print commands that are executed ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-strawberry" - - name: Generate coverage XML (Python 3.6) - if: ${{ !cancelled() && matrix.python-version == '3.6' }} - run: | - export COVERAGE_RCFILE=.coveragerc36 - coverage combine .coverage-sentry-* - coverage xml --ignore-errors - name: Generate coverage XML - if: ${{ !cancelled() && matrix.python-version != '3.6' }} + if: ${{ !cancelled() }} run: | coverage combine .coverage-sentry-* coverage xml diff --git a/.github/workflows/test-integrations-misc.yml b/.github/workflows/test-integrations-misc.yml index 7da9929435..6198241fb0 100644 --- a/.github/workflows/test-integrations-misc.yml +++ b/.github/workflows/test-integrations-misc.yml @@ -29,18 +29,11 @@ jobs: strategy: fail-fast: false matrix: - python-version: ["3.6","3.7","3.8","3.9","3.10","3.11","3.12","3.13"] - # python3.6 reached EOL and is no longer being supported on - # new versions of hosted runners on Github Actions - # ubuntu-20.04 is the last version that supported python3.6 - # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877 + python-version: ["3.7","3.8","3.10","3.11","3.12","3.13"] os: [ubuntu-22.04] - # Use Docker container only for Python 3.6 - container: ${{ matrix.python-version == '3.6' && 'python:3.6' || null }} steps: - uses: actions/checkout@v4.2.2 - uses: actions/setup-python@v5 - if: ${{ matrix.python-version != '3.6' }} with: python-version: ${{ matrix.python-version }} allow-prereleases: true @@ -74,14 +67,8 @@ jobs: run: | set -x # print commands that are executed ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-typer" - - name: Generate coverage XML (Python 3.6) - if: ${{ !cancelled() && matrix.python-version == '3.6' }} - run: | - export COVERAGE_RCFILE=.coveragerc36 - coverage combine .coverage-sentry-* - coverage xml --ignore-errors - name: Generate coverage XML - if: ${{ !cancelled() && matrix.python-version != '3.6' }} + if: ${{ !cancelled() }} run: | coverage combine .coverage-sentry-* coverage xml diff --git a/.github/workflows/test-integrations-network.yml b/.github/workflows/test-integrations-network.yml index 43b5e4a6a5..f7c2dc5ed7 100644 --- a/.github/workflows/test-integrations-network.yml +++ b/.github/workflows/test-integrations-network.yml @@ -30,17 +30,10 @@ jobs: fail-fast: false matrix: python-version: ["3.9","3.12","3.13"] 
- # python3.6 reached EOL and is no longer being supported on - # new versions of hosted runners on Github Actions - # ubuntu-20.04 is the last version that supported python3.6 - # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877 os: [ubuntu-22.04] - # Use Docker container only for Python 3.6 - container: ${{ matrix.python-version == '3.6' && 'python:3.6' || null }} steps: - uses: actions/checkout@v4.2.2 - uses: actions/setup-python@v5 - if: ${{ matrix.python-version != '3.6' }} with: python-version: ${{ matrix.python-version }} allow-prereleases: true @@ -62,14 +55,8 @@ jobs: run: | set -x # print commands that are executed ./scripts/runtox.sh "py${{ matrix.python-version }}-requests-latest" - - name: Generate coverage XML (Python 3.6) - if: ${{ !cancelled() && matrix.python-version == '3.6' }} - run: | - export COVERAGE_RCFILE=.coveragerc36 - coverage combine .coverage-sentry-* - coverage xml --ignore-errors - name: Generate coverage XML - if: ${{ !cancelled() && matrix.python-version != '3.6' }} + if: ${{ !cancelled() }} run: | coverage combine .coverage-sentry-* coverage xml @@ -96,18 +83,11 @@ jobs: strategy: fail-fast: false matrix: - python-version: ["3.6","3.7","3.8","3.9","3.10","3.11","3.12","3.13"] - # python3.6 reached EOL and is no longer being supported on - # new versions of hosted runners on Github Actions - # ubuntu-20.04 is the last version that supported python3.6 - # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877 + python-version: ["3.7","3.8","3.9","3.10","3.11","3.12","3.13"] os: [ubuntu-22.04] - # Use Docker container only for Python 3.6 - container: ${{ matrix.python-version == '3.6' && 'python:3.6' || null }} steps: - uses: actions/checkout@v4.2.2 - uses: actions/setup-python@v5 - if: ${{ matrix.python-version != '3.6' }} with: python-version: ${{ matrix.python-version }} allow-prereleases: true @@ -129,14 +109,8 @@ jobs: run: | set -x # print commands that are executed ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-requests" - - name: Generate coverage XML (Python 3.6) - if: ${{ !cancelled() && matrix.python-version == '3.6' }} - run: | - export COVERAGE_RCFILE=.coveragerc36 - coverage combine .coverage-sentry-* - coverage xml --ignore-errors - name: Generate coverage XML - if: ${{ !cancelled() && matrix.python-version != '3.6' }} + if: ${{ !cancelled() }} run: | coverage combine .coverage-sentry-* coverage xml diff --git a/.github/workflows/test-integrations-tasks.yml b/.github/workflows/test-integrations-tasks.yml index a6850256b2..6d4fdfeb6b 100644 --- a/.github/workflows/test-integrations-tasks.yml +++ b/.github/workflows/test-integrations-tasks.yml @@ -30,17 +30,10 @@ jobs: fail-fast: false matrix: python-version: ["3.7","3.8","3.10","3.11","3.12","3.13"] - # python3.6 reached EOL and is no longer being supported on - # new versions of hosted runners on Github Actions - # ubuntu-20.04 is the last version that supported python3.6 - # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877 os: [ubuntu-22.04] - # Use Docker container only for Python 3.6 - container: ${{ matrix.python-version == '3.6' && 'python:3.6' || null }} steps: - uses: actions/checkout@v4.2.2 - uses: actions/setup-python@v5 - if: ${{ matrix.python-version != '3.6' }} with: python-version: ${{ matrix.python-version }} allow-prereleases: true @@ -84,14 +77,8 @@ jobs: run: | set -x # print commands that are executed ./scripts/runtox.sh "py${{ matrix.python-version }}-spark-latest" - - 
name: Generate coverage XML (Python 3.6) - if: ${{ !cancelled() && matrix.python-version == '3.6' }} - run: | - export COVERAGE_RCFILE=.coveragerc36 - coverage combine .coverage-sentry-* - coverage xml --ignore-errors - name: Generate coverage XML - if: ${{ !cancelled() && matrix.python-version != '3.6' }} + if: ${{ !cancelled() }} run: | coverage combine .coverage-sentry-* coverage xml @@ -118,18 +105,11 @@ jobs: strategy: fail-fast: false matrix: - python-version: ["3.6","3.7","3.8","3.9","3.10","3.11","3.12","3.13"] - # python3.6 reached EOL and is no longer being supported on - # new versions of hosted runners on Github Actions - # ubuntu-20.04 is the last version that supported python3.6 - # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877 + python-version: ["3.7","3.8","3.9","3.10","3.11","3.12","3.13"] os: [ubuntu-22.04] - # Use Docker container only for Python 3.6 - container: ${{ matrix.python-version == '3.6' && 'python:3.6' || null }} steps: - uses: actions/checkout@v4.2.2 - uses: actions/setup-python@v5 - if: ${{ matrix.python-version != '3.6' }} with: python-version: ${{ matrix.python-version }} allow-prereleases: true @@ -173,14 +153,8 @@ jobs: run: | set -x # print commands that are executed ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-spark" - - name: Generate coverage XML (Python 3.6) - if: ${{ !cancelled() && matrix.python-version == '3.6' }} - run: | - export COVERAGE_RCFILE=.coveragerc36 - coverage combine .coverage-sentry-* - coverage xml --ignore-errors - name: Generate coverage XML - if: ${{ !cancelled() && matrix.python-version != '3.6' }} + if: ${{ !cancelled() }} run: | coverage combine .coverage-sentry-* coverage xml diff --git a/.github/workflows/test-integrations-web-1.yml b/.github/workflows/test-integrations-web-1.yml index b40027ddc7..14188fb047 100644 --- a/.github/workflows/test-integrations-web-1.yml +++ b/.github/workflows/test-integrations-web-1.yml @@ -29,11 +29,7 @@ jobs: strategy: fail-fast: false matrix: - python-version: ["3.6","3.7","3.8","3.9","3.10","3.11","3.12","3.13"] - # python3.6 reached EOL and is no longer being supported on - # new versions of hosted runners on Github Actions - # ubuntu-20.04 is the last version that supported python3.6 - # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877 + python-version: ["3.7","3.8","3.9","3.10","3.11","3.12","3.13"] os: [ubuntu-22.04] services: postgres: @@ -50,15 +46,12 @@ jobs: ports: - 5432:5432 env: - SENTRY_PYTHON_TEST_POSTGRES_HOST: ${{ matrix.python-version == '3.6' && 'postgres' || 'localhost' }} + SENTRY_PYTHON_TEST_POSTGRES_HOST: localhost SENTRY_PYTHON_TEST_POSTGRES_USER: postgres SENTRY_PYTHON_TEST_POSTGRES_PASSWORD: sentry - # Use Docker container only for Python 3.6 - container: ${{ matrix.python-version == '3.6' && 'python:3.6' || null }} steps: - uses: actions/checkout@v4.2.2 - uses: actions/setup-python@v5 - if: ${{ matrix.python-version != '3.6' }} with: python-version: ${{ matrix.python-version }} allow-prereleases: true @@ -84,14 +77,8 @@ jobs: run: | set -x # print commands that are executed ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-fastapi" - - name: Generate coverage XML (Python 3.6) - if: ${{ !cancelled() && matrix.python-version == '3.6' }} - run: | - export COVERAGE_RCFILE=.coveragerc36 - coverage combine .coverage-sentry-* - coverage xml --ignore-errors - name: Generate coverage XML - if: ${{ !cancelled() && matrix.python-version != '3.6' }} + if: ${{ !cancelled() 
}} run: | coverage combine .coverage-sentry-* coverage xml diff --git a/.github/workflows/test-integrations-web-2.yml b/.github/workflows/test-integrations-web-2.yml index 1fbff47b65..624b46bf9a 100644 --- a/.github/workflows/test-integrations-web-2.yml +++ b/.github/workflows/test-integrations-web-2.yml @@ -30,17 +30,10 @@ jobs: fail-fast: false matrix: python-version: ["3.8","3.9","3.12","3.13"] - # python3.6 reached EOL and is no longer being supported on - # new versions of hosted runners on Github Actions - # ubuntu-20.04 is the last version that supported python3.6 - # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877 os: [ubuntu-22.04] - # Use Docker container only for Python 3.6 - container: ${{ matrix.python-version == '3.6' && 'python:3.6' || null }} steps: - uses: actions/checkout@v4.2.2 - uses: actions/setup-python@v5 - if: ${{ matrix.python-version != '3.6' }} with: python-version: ${{ matrix.python-version }} allow-prereleases: true @@ -90,14 +83,8 @@ jobs: run: | set -x # print commands that are executed ./scripts/runtox.sh "py${{ matrix.python-version }}-tornado-latest" - - name: Generate coverage XML (Python 3.6) - if: ${{ !cancelled() && matrix.python-version == '3.6' }} - run: | - export COVERAGE_RCFILE=.coveragerc36 - coverage combine .coverage-sentry-* - coverage xml --ignore-errors - name: Generate coverage XML - if: ${{ !cancelled() && matrix.python-version != '3.6' }} + if: ${{ !cancelled() }} run: | coverage combine .coverage-sentry-* coverage xml @@ -124,18 +111,11 @@ jobs: strategy: fail-fast: false matrix: - python-version: ["3.6","3.7","3.8","3.9","3.10","3.11","3.12","3.13"] - # python3.6 reached EOL and is no longer being supported on - # new versions of hosted runners on Github Actions - # ubuntu-20.04 is the last version that supported python3.6 - # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877 + python-version: ["3.7","3.8","3.9","3.10","3.11","3.12","3.13"] os: [ubuntu-22.04] - # Use Docker container only for Python 3.6 - container: ${{ matrix.python-version == '3.6' && 'python:3.6' || null }} steps: - uses: actions/checkout@v4.2.2 - uses: actions/setup-python@v5 - if: ${{ matrix.python-version != '3.6' }} with: python-version: ${{ matrix.python-version }} allow-prereleases: true @@ -185,14 +165,8 @@ jobs: run: | set -x # print commands that are executed ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-tornado" - - name: Generate coverage XML (Python 3.6) - if: ${{ !cancelled() && matrix.python-version == '3.6' }} - run: | - export COVERAGE_RCFILE=.coveragerc36 - coverage combine .coverage-sentry-* - coverage xml --ignore-errors - name: Generate coverage XML - if: ${{ !cancelled() && matrix.python-version != '3.6' }} + if: ${{ !cancelled() }} run: | coverage combine .coverage-sentry-* coverage xml diff --git a/.gitignore b/.gitignore index 0dad53b2f4..4401dd6bfc 100644 --- a/.gitignore +++ b/.gitignore @@ -4,6 +4,7 @@ *.db *.pid .python-version +.tool-versions .coverage .coverage-sentry* coverage.xml diff --git a/MIGRATION_GUIDE.md b/MIGRATION_GUIDE.md index 53396a37ba..187b3aaeb0 100644 --- a/MIGRATION_GUIDE.md +++ b/MIGRATION_GUIDE.md @@ -1,13 +1,193 @@ -# Sentry SDK 2.0 Migration Guide - -Looking to upgrade from Sentry SDK 1.x to 2.x? Here's a comprehensive list of what's changed. Looking for a more digestable summary? See the [guide in the docs](https://docs.sentry.io/platforms/python/migration/1.x-to-2.x) with the most common migration patterns. 
- -## New Features +# Sentry SDK Migration Guide + +## Upgrading to 3.0 + +Looking to upgrade from Sentry SDK 2.x to 3.x? Here's a comprehensive list of what's changed. Looking for a more digestible summary? See the [guide in the docs](https://docs.sentry.io/platforms/python/migration/2.x-to-3.x) with the most common migration patterns. + +### New Features + +### Changed + +- The SDK now supports Python 3.7 and higher. +- The default of `traces_sample_rate` changed to `0`. This means incoming traces will be continued by default. For example, if your frontend sends a `sentry-trace/baggage` headers pair, your SDK will create Spans and send them to Sentry. (The default used to be `None`, meaning no Spans were created by default, no matter what headers the frontend sent to your project.) See also: https://docs.sentry.io/platforms/python/configuration/options/#traces_sample_rate +- `sentry_sdk.start_span` now only takes keyword arguments. +- `sentry_sdk.start_transaction`/`sentry_sdk.start_span` no longer take the following arguments: `span`, `parent_sampled`, `trace_id`, `span_id` or `parent_span_id`. +- You can no longer change the sampled status of a span with `span.sampled = False` after starting it. +- The `Span()` constructor does not accept a `hub` parameter anymore. +- `Span.finish()` does not accept a `hub` parameter anymore. +- `Span.finish()` no longer returns the `event_id` if the event is sent to Sentry. +- The `Profile()` constructor does not accept a `hub` parameter anymore. +- A `Profile` object does not have a `.hub` property anymore. +- `MAX_PROFILE_DURATION_NS`, `PROFILE_MINIMUM_SAMPLES`, `Profile`, `Scheduler`, `ThreadScheduler`, `GeventScheduler`, `has_profiling_enabled`, `setup_profiler`, `teardown_profiler` are no longer accessible from `sentry_sdk.profiler`. They're still accessible from `sentry_sdk.profiler.transaction_profiler`. +- `DEFAULT_SAMPLING_FREQUENCY`, `MAX_STACK_DEPTH`, `get_frame_name`, `extract_frame`, `extract_stack`, `frame_id` are no longer accessible from `sentry_sdk.profiler`. They're still accessible from `sentry_sdk.profiler.utils`. +- `sentry_sdk.continue_trace` no longer returns a `Transaction` and is now a context manager. +- Redis integration: Redis pipeline spans no longer contain a `span["data"]["redis.commands"]` dict `{"count": 3, "first_ten": ["cmd1", "cmd2", ...]}`. Instead there are `span["data"]["redis.commands.count"]` (containing `3`) and `span["data"]["redis.commands.first_ten"]` (containing `["cmd1", "cmd2", ...]`). +- clickhouse-driver integration: The query is now available under the `db.query.text` span attribute (only if `send_default_pii` is `True`). +- `sentry_sdk.init` now returns `None` instead of a context manager. +- The `sampling_context` argument of `traces_sampler` and `profiles_sampler` now additionally contains all span attributes known at span start. +- We updated how we handle `ExceptionGroup`s. You will now get more data if ExceptionGroups appear in chained exceptions. Because of this, the grouping of issues may change after updating the SDK, so you may temporarily see the same exception in two Sentry issues (one from before the update, one from after the update). +- The integration for the Python `logging` module no longer sends Sentry issues by default when calling `logging.error()`, `logging.critical()` or `logging.exception()`. If you want to preserve the old behavior, use `sentry_sdk.init(integrations=[LoggingIntegration(event_level="ERROR")])`.
+- The `SentrySpanProcessor` and `SentryPropagator` are exported from `sentry_sdk.opentelemetry` instead of `sentry_sdk.integrations.opentelemetry`. +- The integration-specific content of the `sampling_context` argument of `traces_sampler` and `profiles_sampler` now looks different. + - The Celery integration doesn't add the `celery_job` dictionary anymore. Instead, the individual keys are now available as: + + | Dictionary keys | Sampling context key | Example | + | ---------------------- | --------------------------- | ------------------------------ | + | `celery_job["args"]` | `celery.job.args.{index}` | `celery.job.args.0` | + | `celery_job["kwargs"]` | `celery.job.kwargs.{kwarg}` | `celery.job.kwargs.kwarg_name` | + | `celery_job["task"]` | `celery.job.task` | | + + Note that all of these are serialized, i.e., not the original `args` and `kwargs` but rather OpenTelemetry-friendly span attributes. + + - The AIOHTTP integration doesn't add the `aiohttp_request` object anymore. Instead, some of the individual properties of the request are accessible, if available, as follows: + + | Request property | Sampling context key(s) | + | ----------------- | ------------------------------- | + | `path` | `url.path` | + | `query_string` | `url.query` | + | `method` | `http.request.method` | + | `host` | `server.address`, `server.port` | + | `scheme` | `url.scheme` | + | full URL | `url.full` | + | `request.headers` | `http.request.header.{header}` | + + - The Tornado integration doesn't add the `tornado_request` object anymore. Instead, some of the individual properties of the request are accessible, if available, as follows: + + | Request property | Sampling context key(s) | + | ----------------- | --------------------------------------------------- | + | `path` | `url.path` | + | `query` | `url.query` | + | `protocol` | `url.scheme` | + | `method` | `http.request.method` | + | `host` | `server.address`, `server.port` | + | `version` | `network.protocol.name`, `network.protocol.version` | + | full URL | `url.full` | + | `request.headers` | `http.request.header.{header}` | + + - The WSGI integration doesn't add the `wsgi_environ` object anymore. Instead, the individual properties of the environment are accessible, if available, as follows: + + | Env property | Sampling context key(s) | + | ----------------- | ------------------------------------------------- | + | `PATH_INFO` | `url.path` | + | `QUERY_STRING` | `url.query` | + | `REQUEST_METHOD` | `http.request.method` | + | `SERVER_NAME` | `server.address` | + | `SERVER_PORT` | `server.port` | + | `SERVER_PROTOCOL` | `server.protocol.name`, `server.protocol.version` | + | `wsgi.url_scheme` | `url.scheme` | + | full URL | `url.full` | + | `HTTP_*` | `http.request.header.{header}` | + + - The ASGI integration doesn't add the `asgi_scope` object anymore. Instead, the individual properties of the scope, if available, are accessible as follows: + + | Scope property | Sampling context key(s) | + | -------------- | ------------------------------- | + | `type` | `network.protocol.name` | + | `scheme` | `url.scheme` | + | `path` | `url.path` | + | `query` | `url.query` | + | `http_version` | `network.protocol.version` | + | `method` | `http.request.method` | + | `server` | `server.address`, `server.port` | + | `client` | `client.address`, `client.port` | + | full URL | `url.full` | + | `headers` | `http.request.header.{header}` | + + - The RQ integration doesn't add the `rq_job` object anymore.
Instead, the individual properties of the job and the queue, if available, are accessible as follows: + + | RQ property | Sampling context key | Example | + | --------------- | ---------------------------- | ------------------------ | + | `rq_job.args` | `rq.job.args.{index}` | `rq.job.args.0` | + | `rq_job.kwargs` | `rq.job.kwargs.{kwarg}` | `rq.job.kwargs.my_kwarg` | + | `rq_job.func` | `rq.job.func` | | + | `queue.name` | `messaging.destination.name` | | + | `rq_job.id` | `messaging.message.id` | | + + Note that `rq.job.args`, `rq.job.kwargs`, and `rq.job.func` are serialized and not the actual objects on the job. + + - The AWS Lambda integration doesn't add the `aws_event` and `aws_context` objects anymore. Instead, the following, if available, is accessible: + + | AWS property | Sampling context key(s) | + | ------------------------------------------- | ------------------------------ | + | `aws_event["httpMethod"]` | `http.request.method` | + | `aws_event["queryStringParameters"]` | `url.query` | + | `aws_event["path"]` | `url.path` | + | full URL | `url.full` | + | `aws_event["headers"]["X-Forwarded-Proto"]` | `network.protocol.name` | + | `aws_event["headers"]["Host"]` | `server.address` | + | `aws_context["function_name"]` | `faas.name` | + | `aws_event["headers"]` | `http.request.header.{header}` | + + - The GCP integration doesn't add the `gcp_env` and `gcp_event` keys anymore. Instead, the following, if available, is accessible: + + | Old sampling context key | New sampling context key | + | --------------------------------- | ------------------------------ | + | `gcp_env["function_name"]` | `faas.name` | + | `gcp_env["function_region"]` | `faas.region` | + | `gcp_env["function_project"]` | `gcp.function.project` | + | `gcp_env["function_identity"]` | `gcp.function.identity` | + | `gcp_env["function_entry_point"]` | `gcp.function.entry_point` | + | `gcp_event.method` | `http.request.method` | + | `gcp_event.query_string` | `url.query` | + | `gcp_event.headers` | `http.request.header.{header}` | + + +### Removed + +- Dropped support for Python 3.6. +- The `enable_tracing` `init` option has been removed. Configure `traces_sample_rate` directly. +- The `propagate_traces` `init` option has been removed. Use `trace_propagation_targets` instead. +- The `custom_sampling_context` parameter of `start_transaction` has been removed. Use `attributes` instead to set key-value pairs of data that should be accessible in the traces sampler. Note that span attributes need to conform to the [OpenTelemetry specification](https://opentelemetry.io/docs/concepts/signals/traces/#attributes), meaning only certain types can be set as values. +- `set_measurement` has been removed. +- The PyMongo integration no longer sets tags. The data is still accessible via span attributes. +- The PyMongo integration doesn't set `operation_ids` anymore. The individual IDs (`operation_id`, `request_id`, `session_id`) are now accessible as separate span attributes. +- `sentry_sdk.metrics` and associated metrics APIs have been removed as Sentry no longer accepts metrics data in this form. See https://sentry.zendesk.com/hc/en-us/articles/26369339769883-Upcoming-API-Changes-to-Metrics +- The experimental options `enable_metrics`, `before_emit_metric` and `metric_code_locations` have been removed. +- When setting span status, the HTTP status code is no longer automatically added as a tag. +- Class `Hub` has been removed. +- Class `_ScopeManager` has been removed.
+- The context manager `auto_session_tracking()` has been removed. Use `track_session()` instead. +- The context manager `auto_session_tracking_scope()` has been removed. Use `track_session()` instead. +- Utility function `is_auto_session_tracking_enabled()` has been removed. There is no public replacement. There is a private `_is_auto_session_tracking_enabled()` (if you absolutely need this function). It accepts a `scope` parameter instead of the previously used `hub` parameter. +- Utility function `is_auto_session_tracking_enabled_scope()` has been removed. There is no public replacement. There is a private `_is_auto_session_tracking_enabled()` (if you absolutely need this function). +- Setting `scope.level` has been removed. Use `scope.set_level` instead. +- `span.containing_transaction` has been removed. Use `span.root_span` instead. +- `continue_from_headers`, `continue_from_environ` and `from_traceparent` have been removed. Use the top-level API `sentry_sdk.continue_trace` instead. +- The `PropagationContext` constructor no longer takes a `dynamic_sampling_context` but takes a `baggage` object instead. +- `ThreadingIntegration` no longer takes the `propagate_hub` argument. +- `Baggage.populate_from_transaction` has been removed. +- `debug.configure_debug_hub` has been removed. +- `profiles_sample_rate` and `profiler_mode` were removed from options available via `_experiments`. Use the top-level `profiles_sample_rate` and `profiler_mode` options instead. +- `Transport.capture_event` has been removed. Use `Transport.capture_envelope` instead. +- Function transports are no longer supported. Subclass `Transport` instead. +- `start_transaction` (`start_span`) no longer takes the following arguments: + - `trace_id`, `baggage`: use `continue_trace` for propagation from headers or environment variables + - `same_process_as_parent` + - `span_id` + - `parent_span_id`: you can supply a `parent_span` instead +- The `Scope.transaction` property has been removed. To obtain the root span (previously transaction), use `Scope.root_span`. To set the root span's (transaction's) name, use `Scope.set_transaction_name()`. +- Passing a list or `None` for `failed_request_status_codes` in the Starlette integration is no longer supported. Pass a set of integers instead. +- The `span` argument of `Scope.trace_propagation_meta` is no longer supported. +- Setting `Scope.user` directly is no longer supported. Use `Scope.set_user()` instead. +- Dropped support for Django versions below 2.0. +- Dropped support for trytond versions below 5.0. +- Dropped support for Falcon versions below 3.0. + +### Deprecated + +- `sentry_sdk.start_transaction()` is deprecated. Use `sentry_sdk.start_span()` instead. +- `Span.set_data()` is deprecated. Use `Span.set_attribute()` instead. + +## Upgrading to 2.0 + +Looking to upgrade from Sentry SDK 1.x to 2.x? Here's a comprehensive list of what's changed. Looking for a more digestible summary? See the [guide in the docs](https://docs.sentry.io/platforms/python/migration/1.x-to-2.x) with the most common migration patterns. + +### New Features - Additional integrations will now be activated automatically if the SDK detects the respective package is installed: Ariadne, ARQ, asyncpg, Chalice, clickhouse-driver, GQL, Graphene, huey, Loguru, PyMongo, Quart, Starlite, Strawberry. - While refactoring the [inner workings](https://docs.sentry.io/platforms/python/enriching-events/scopes/) of the SDK we added new top-level APIs for custom instrumentation called `new_scope` and `isolation_scope`.
See the [Deprecated](#deprecated) section to see how they map to the existing APIs. -## Changed +### Changed - The Pyramid integration will not capture errors that might happen in `authenticated_userid()` in a custom `AuthenticationPolicy` class. - The method `need_code_loation` of the `MetricsAggregator` was renamed to `need_code_location`. @@ -59,7 +239,7 @@ Looking to upgrade from Sentry SDK 1.x to 2.x? Here's a comprehensive list of wh -## Removed +### Removed - Removed support for Python 2 and Python 3.5. The SDK now requires at least Python 3.6. - Removed support for Celery 3.\*. @@ -82,7 +262,7 @@ Looking to upgrade from Sentry SDK 1.x to 2.x? Here's a comprehensive list of wh - Removed the experimental `metrics_summary_sample_rate` config option. - Removed the experimental `should_summarize_metric` config option. -## Deprecated +### Deprecated - Using the `Hub` directly as well as using hub-based APIs has been deprecated. Where available, use [the top-level API instead](sentry_sdk/api.py); otherwise use the [scope API](sentry_sdk/scope.py) or the [client API](sentry_sdk/client.py). diff --git a/constraints.txt b/constraints.txt new file mode 100644 index 0000000000..697aca1388 --- /dev/null +++ b/constraints.txt @@ -0,0 +1,3 @@ +# Workaround for https://github.com/pypa/setuptools/issues/4519. +# Applies only for Django tests. +setuptools<72.0.0 diff --git a/docs/api.rst b/docs/api.rst index 87c2535abd..95acc70455 100644 --- a/docs/api.rst +++ b/docs/api.rst @@ -59,8 +59,4 @@ Client Management Managing Scope (advanced) ========================= -.. autofunction:: sentry_sdk.api.configure_scope -.. autofunction:: sentry_sdk.api.push_scope - .. autofunction:: sentry_sdk.api.new_scope - diff --git a/docs/apidocs.rst b/docs/apidocs.rst index a3c8a6e150..ffe265b276 100644 --- a/docs/apidocs.rst +++ b/docs/apidocs.rst @@ -2,9 +2,6 @@ API Docs ======== -.. autoclass:: sentry_sdk.Hub - :members: - .. autoclass:: sentry_sdk.Scope :members: diff --git a/docs/conf.py b/docs/conf.py index 709f557d16..5c9477b78d 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -13,6 +13,8 @@ import sphinx.ext.autodoc # noqa: F401 import sphinx.ext.intersphinx # noqa: F401 import urllib3.exceptions # noqa: F401 +import importlib_metadata # noqa: F401 +import opentelemetry.sdk.metrics._internal # noqa: F401 typing.TYPE_CHECKING = True @@ -31,7 +33,7 @@ copyright = "2019-{}, Sentry Team and Contributors".format(datetime.now().year) author = "Sentry Team and Contributors" -release = "2.27.0" +release = "3.0.0" version = ".".join(release.split(".")[:2]) # The short X.Y version. 
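[Editorial note: to make the 3.x tracing changes in the migration guide above concrete, here is a minimal sketch, not part of the patch, showing keyword-only `start_span`, `continue_trace` as a context manager, and `set_attribute` replacing the deprecated `set_data`. The DSN and header values are placeholders, and the exact `continue_trace` signature is an assumption based on the guide's description.]

```python
import sentry_sdk

# 3.x: init() returns None and can no longer be used as a context manager.
sentry_sdk.init(dsn="https://examplePublicKey@o0.ingest.sentry.io/0")

# Placeholder headers from an upstream service (sentry-trace: trace_id-span_id-sampled).
incoming_headers = {
    "sentry-trace": "771a43a4192642f0b136d5159a501700-1234567890abcdef-1",
}

# 3.x: continue_trace() no longer returns a Transaction; it is a context manager.
with sentry_sdk.continue_trace(incoming_headers):
    # 3.x: start_span() only takes keyword arguments.
    with sentry_sdk.start_span(op="task", name="process-request") as span:
        # Span.set_data() is deprecated; use Span.set_attribute() instead.
        span.set_attribute("job.id", "1234")
```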
diff --git a/requirements-aws-lambda-layer.txt b/requirements-aws-lambda-layer.txt index 8986fdafc0..7bde8c4844 100644 --- a/requirements-aws-lambda-layer.txt +++ b/requirements-aws-lambda-layer.txt @@ -5,3 +5,5 @@ certifi # So we pin this here to make our Lambda layer work with # Lambda Function using Python 3.7+ urllib3<1.27 + +opentelemetry-distro>=0.35b0 diff --git a/requirements-testing.txt b/requirements-testing.txt index 221863f4ab..7014f49137 100644 --- a/requirements-testing.txt +++ b/requirements-testing.txt @@ -14,5 +14,6 @@ pysocks socksio httpcore[http2] setuptools +freezegun Brotli docker diff --git a/scripts/init_serverless_sdk.py b/scripts/init_serverless_sdk.py index 9b4412c420..d58605ff6f 100644 --- a/scripts/init_serverless_sdk.py +++ b/scripts/init_serverless_sdk.py @@ -50,8 +50,8 @@ def extract_and_load_lambda_function_module(self, module_path): module_name = module_path.split(os.path.sep)[-1] module_file_path = module_path + ".py" - # Supported python versions are 3.6, 3.7, 3.8 - if py_version >= (3, 6): + # Supported python versions are 3.7, 3.8 + if py_version >= (3, 7): import importlib.util spec = importlib.util.spec_from_file_location( diff --git a/scripts/populate_tox/README.md b/scripts/populate_tox/README.md index c9a3b67ba0..39bf627ea1 100644 --- a/scripts/populate_tox/README.md +++ b/scripts/populate_tox/README.md @@ -18,6 +18,7 @@ then determining which versions make sense to test to get good coverage. The lowest supported and latest version of a framework are always tested, with a number of releases in between: + - If the package has majors, we pick the highest version of each major. For the latest major, we also pick the lowest version in that major. - If the package doesn't have multiple majors, we pick two versions in between @@ -35,7 +36,8 @@ the main package (framework, library) to test with; any additional test dependencies, optionally gated behind specific conditions; and optionally the Python versions to test on. -Constraints are defined using the format specified below. The following sections describe each key. +Constraints are defined using the format specified below. The following sections +describe each key. ``` integration_name: { @@ -46,6 +48,7 @@ integration_name: { }, "python": python_version_specifier, "include": package_version_specifier, + "test_on_all_python_versions": bool, } ``` @@ -68,11 +71,12 @@ The test dependencies of the test suite. They're defined as a dictionary of in the package list of a rule will be installed as long as the rule applies. `rule`s are predefined. Each `rule` must be one of the following: - - `*`: packages will be always installed - - a version specifier on the main package (e.g. `<=0.32`): packages will only - be installed if the main package falls into the version bounds specified - - specific Python version(s) in the form `py3.8,py3.9`: packages will only be - installed if the Python version matches one from the list + +- `*`: packages will be always installed +- a version specifier on the main package (e.g. `<=0.32`): packages will only + be installed if the main package falls into the version bounds specified +- specific Python version(s) in the form `py3.8,py3.9`: packages will only be + installed if the Python version matches one from the list Rules can be used to specify version bounds on older versions of the main package's dependencies, for example. If e.g. Flask tests generally need @@ -101,6 +105,7 @@ Python versions, you can say: ... } ``` + This key is optional. 
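[Editorial note: as an illustration of the `deps` rules described in the README section above, a hypothetical config entry could combine all three rule types. The `myframework` name, versions, and packages are invented for this sketch.]

```python
# Hypothetical entry illustrating the `deps` rules described above.
"myframework": {
    "package": "myframework",
    "deps": {
        "*": ["pytest-asyncio"],               # always installed
        "<2.0": ["werkzeug<2.1.0"],            # only when the main package is < 2.0
        "py3.8,py3.9": ["typing-extensions"],  # only on Python 3.8 and 3.9
    },
    "python": ">=3.8",
}
```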
### `python` @@ -145,7 +150,6 @@ The `include` key can also be used to exclude a set of specific versions by usin `!=` version specifiers. For example, the Starlite restriction above could equivalently be expressed like so: - ```python "starlite": { "include": "!=2.0.0a1,!=2.0.0a2", @@ -153,6 +157,19 @@ be expressed like so: } ``` +### `test_on_all_python_versions` + +By default, the script will cherry-pick a few Python versions to test each +integration on. If you want a test suite to run on all supported Python versions +instead, set `test_on_all_python_versions` to `True`. + +```python +"common": { + # The common test suite should run on all Python versions + "test_on_all_python_versions": True, + ... +} +``` ## How-Tos @@ -176,7 +193,8 @@ A handful of integration test suites are still hardcoded. The goal is to migrate them all to `populate_tox.py` over time. 1. Remove the integration from the `IGNORE` list in `populate_tox.py`. -2. Remove the hardcoded entries for the integration from the `envlist` and `deps` sections of `tox.jinja`. +2. Remove the hardcoded entries for the integration from the `envlist` and `deps` + sections of `tox.jinja`. 3. Run `scripts/generate-test-files.sh`. 4. Run the test suite, either locally or by creating a PR. 5. Address any test failures that happen. @@ -185,6 +203,7 @@ You might have to introduce additional version bounds on the dependencies of the package. Try to determine the source of the failure and address it. Common scenarios: + - An old version of the tested package installs a dependency without defining an upper version bound on it. A new version of the dependency is installed that is incompatible with the package. In this case you need to determine which diff --git a/scripts/populate_tox/config.py b/scripts/populate_tox/config.py index 4d5d5b14ce..0012c26ba6 100644 --- a/scripts/populate_tox/config.py +++ b/scripts/populate_tox/config.py @@ -39,12 +39,24 @@ "package": "celery", "deps": { "*": ["newrelic", "redis"], - "py3.7": ["importlib-metadata<5.0"], }, + "python": ">=3.8", }, "clickhouse_driver": { "package": "clickhouse-driver", }, + "common": { + "package": "opentelemetry-sdk", + "test_on_all_python_versions": True, + "deps": { + "*": ["pytest", "pytest-asyncio"], + # See https://github.com/pytest-dev/pytest/issues/9621 + # and https://github.com/pytest-dev/pytest-forked/issues/67 + # for justification of the upper bound on pytest + "py3.7": ["pytest<7.0.0"], + "py3.8": ["hypothesis"], + }, + }, "cohere": { "package": "cohere", "python": ">=3.9", @@ -53,6 +65,7 @@ "package": "django", "deps": { "*": [ + "channels[daphne]", "psycopg2-binary", "djangorestframework", "pytest-django", @@ -65,7 +78,6 @@ "Werkzeug<2.1.0", ], "<3.1": ["pytest-django<4.0"], - ">=2.0": ["channels[daphne]"], }, }, "dramatiq": { @@ -226,7 +238,7 @@ "package": "trytond", "deps": { "*": ["werkzeug"], - "<=5.0": ["werkzeug<1.0"], + "<5.1": ["werkzeug<1.0"], }, }, "typer": { diff --git a/scripts/populate_tox/populate_tox.py b/scripts/populate_tox/populate_tox.py index 0aeb0f02ef..fade7472c8 100644 --- a/scripts/populate_tox/populate_tox.py +++ b/scripts/populate_tox/populate_tox.py @@ -61,7 +61,6 @@ "asgi", "aws_lambda", "cloud_resource_context", - "common", "gevent", "opentelemetry", "potel", @@ -346,22 +345,28 @@ def supported_python_versions( return supported -def pick_python_versions_to_test(python_versions: list[Version]) -> list[Version]: +def pick_python_versions_to_test( + python_versions: list[Version], test_all: bool = False +) -> list[Version]: """ Given a 
list of Python versions, pick those that make sense to test on. Currently, this is the oldest, the newest, and the second newest Python version. """ - filtered_python_versions = { - python_versions[0], - } + if test_all: + filtered_python_versions = python_versions - filtered_python_versions.add(python_versions[-1]) - try: - filtered_python_versions.add(python_versions[-2]) - except IndexError: - pass + else: + filtered_python_versions = { + python_versions[0], + } + + filtered_python_versions.add(python_versions[-1]) + try: + filtered_python_versions.add(python_versions[-2]) + except IndexError: + pass return sorted(filtered_python_versions) @@ -515,6 +520,9 @@ def _add_python_versions_to_release( time.sleep(PYPI_COOLDOWN) # give PYPI some breathing room + test_on_all_python_versions = ( + TEST_SUITE_CONFIG[integration].get("test_on_all_python_versions") or False + ) target_python_versions = TEST_SUITE_CONFIG[integration].get("python") if target_python_versions: target_python_versions = SpecifierSet(target_python_versions) @@ -523,7 +531,8 @@ def _add_python_versions_to_release( supported_python_versions( determine_python_versions(release_pypi_data), target_python_versions, - ) + ), + test_all=test_on_all_python_versions, ) release.rendered_python_versions = _render_python_versions(release.python_versions) diff --git a/scripts/populate_tox/tox.jinja b/scripts/populate_tox/tox.jinja index 2869da275b..b24190e231 100644 --- a/scripts/populate_tox/tox.jinja +++ b/scripts/populate_tox/tox.jinja @@ -17,11 +17,8 @@ requires = # This version introduced using pip 24.1 which does not work with older Celery and HTTPX versions. virtualenv<20.26.3 envlist = - # === Common === - {py3.6,py3.7,py3.8,py3.9,py3.10,py3.11,py3.12,py3.13}-common - # === Gevent === - {py3.6,py3.8,py3.10,py3.11,py3.12}-gevent + {py3.8,py3.10,py3.11,py3.12}-gevent # === Integrations === # General format is {pythonversion}-{integrationname}-v{frameworkversion} @@ -55,24 +52,24 @@ envlist = {py3.8,py3.11}-beam-latest # Boto3 - {py3.6,py3.7}-boto3-v{1.12} + {py3.7}-boto3-v{1.12} {py3.7,py3.11,py3.12}-boto3-v{1.23} {py3.11,py3.12}-boto3-v{1.34} {py3.11,py3.12,py3.13}-boto3-latest # Chalice - {py3.6,py3.9}-chalice-v{1.16} + {py3.7,py3.9}-chalice-v{1.16} {py3.8,py3.12,py3.13}-chalice-latest # Cloud Resource Context - {py3.6,py3.12,py3.13}-cloud_resource_context + {py3.7,py3.12,py3.13}-cloud_resource_context # GCP {py3.7}-gcp # HTTPX - {py3.6,py3.9}-httpx-v{0.16,0.18} - {py3.6,py3.10}-httpx-v{0.20,0.22} + {py3.7,py3.9}-httpx-v{0.16,0.18} + {py3.7,py3.10}-httpx-v{0.20,0.22} {py3.7,py3.11,py3.12}-httpx-v{0.23,0.24} {py3.9,py3.11,py3.12}-httpx-v{0.25,0.27} {py3.9,py3.12,py3.13}-httpx-latest @@ -90,14 +87,8 @@ envlist = {py3.9,py3.11,py3.12}-openai-latest {py3.9,py3.11,py3.12}-openai-notiktoken - # OpenTelemetry (OTel) - {py3.7,py3.9,py3.12,py3.13}-opentelemetry - - # OpenTelemetry Experimental (POTel) - {py3.8,py3.9,py3.10,py3.11,py3.12,py3.13}-potel - # pure_eval - {py3.6,py3.12,py3.13}-pure_eval + {py3.7,py3.12,py3.13}-pure_eval # Quart {py3.7,py3.11}-quart-v{0.16} @@ -109,24 +100,23 @@ envlist = {py3.10,py3.11}-ray-latest # Redis - {py3.6,py3.8}-redis-v{3} + {py3.7,py3.8}-redis-v{3} {py3.7,py3.8,py3.11}-redis-v{4} {py3.7,py3.11,py3.12}-redis-v{5} {py3.7,py3.12,py3.13}-redis-latest # Requests - {py3.6,py3.8,py3.12,py3.13}-requests + {py3.7,py3.8,py3.12,py3.13}-requests # RQ (Redis Queue) - {py3.6}-rq-v{0.6} - {py3.6,py3.9}-rq-v{0.13,1.0} - {py3.6,py3.11}-rq-v{1.5,1.10} + {py3.7,py3.9}-rq-v{0.13,1.0} + {py3.7,py3.11}-rq-v{1.5,1.10} 
{py3.7,py3.11,py3.12}-rq-v{1.15,1.16} {py3.7,py3.12,py3.13}-rq-latest # Sanic - {py3.6,py3.7}-sanic-v{0.8} - {py3.6,py3.8}-sanic-v{20} + {py3.7}-sanic-v{0.8} + {py3.8}-sanic-v{20} {py3.8,py3.11,py3.12}-sanic-v{24.6} {py3.9,py3.12,py3.13}-sanic-latest @@ -155,22 +145,13 @@ deps = linters: -r requirements-linting.txt linters: werkzeug<2.3.0 - # === Common === - py3.8-common: hypothesis - common: pytest-asyncio - # See https://github.com/pytest-dev/pytest/issues/9621 - # and https://github.com/pytest-dev/pytest-forked/issues/67 - # for justification of the upper bound on pytest - {py3.6,py3.7}-common: pytest<7.0.0 - {py3.8,py3.9,py3.10,py3.11,py3.12,py3.13}-common: pytest - # === Gevent === - {py3.6,py3.7,py3.8,py3.9,py3.10,py3.11}-gevent: gevent>=22.10.0, <22.11.0 + {py3.7,py3.8,py3.9,py3.10,py3.11}-gevent: gevent>=22.10.0, <22.11.0 {py3.12}-gevent: gevent # See https://github.com/pytest-dev/pytest/issues/9621 # and https://github.com/pytest-dev/pytest-forked/issues/67 # for justification of the upper bound on pytest - {py3.6,py3.7}-gevent: pytest<7.0.0 + py3.7-gevent: pytest<7.0.0 {py3.8,py3.9,py3.10,py3.11,py3.12}-gevent: pytest # === Integrations === @@ -264,12 +245,6 @@ deps = openai-latest: tiktoken~=0.6.0 openai-notiktoken: openai - # OpenTelemetry (OTel) - opentelemetry: opentelemetry-distro - - # OpenTelemetry Experimental (POTel) - potel: -e .[opentelemetry-experimental] - # pure_eval pure_eval: pure_eval @@ -294,7 +269,7 @@ deps = # Redis redis: fakeredis!=1.7.4 redis: pytest<8.0.0 - {py3.6,py3.7}-redis: fakeredis!=2.26.0 # https://github.com/cunla/fakeredis-py/issues/341 + py3.7-redis: fakeredis!=2.26.0 # https://github.com/cunla/fakeredis-py/issues/341 {py3.7,py3.8,py3.9,py3.10,py3.11,py3.12,py3.13}-redis: pytest-asyncio redis-v3: redis~=3.0 redis-v4: redis~=4.0 @@ -306,13 +281,11 @@ deps = # RQ (Redis Queue) # https://github.com/jamesls/fakeredis/issues/245 - rq-v{0.6}: fakeredis<1.0 - rq-v{0.6}: redis<3.2.2 rq-v{0.13,1.0,1.5,1.10}: fakeredis>=1.0,<1.7.4 rq-v{1.15,1.16}: fakeredis<2.28.0 - {py3.6,py3.7}-rq-v{1.15,1.16}: fakeredis!=2.26.0 # https://github.com/cunla/fakeredis-py/issues/341 + py3.7-rq-v{1.15,1.16}: fakeredis!=2.26.0 # https://github.com/cunla/fakeredis-py/issues/341 rq-latest: fakeredis<2.28.0 - {py3.6,py3.7}-rq-latest: fakeredis!=2.26.0 # https://github.com/cunla/fakeredis-py/issues/341 + py3.7-rq-latest: fakeredis!=2.26.0 # https://github.com/cunla/fakeredis-py/issues/341 rq-v0.6: rq~=0.6.0 rq-v0.13: rq~=0.13.0 rq-v1.0: rq~=1.0.0 @@ -327,7 +300,6 @@ deps = sanic: aiohttp sanic-v{24.6}: sanic_testing sanic-latest: sanic_testing - {py3.6}-sanic: aiocontextvars==0.2.1 sanic-v0.8: sanic~=0.8.0 sanic-v20: sanic~=20.0 sanic-v24.6: sanic~=24.6.0 @@ -359,9 +331,9 @@ setenv = PYTHONDONTWRITEBYTECODE=1 OBJC_DISABLE_INITIALIZE_FORK_SAFETY=YES COVERAGE_FILE=.coverage-sentry-{envname} - py3.6: COVERAGE_RCFILE=.coveragerc36 django: DJANGO_SETTINGS_MODULE=tests.integrations.django.myapp.settings + py3.12-django: PIP_CONSTRAINT=constraints.txt common: TESTPATH=tests gevent: TESTPATH=tests @@ -398,8 +370,6 @@ setenv = loguru: TESTPATH=tests/integrations/loguru openai: TESTPATH=tests/integrations/openai openfeature: TESTPATH=tests/integrations/openfeature - opentelemetry: TESTPATH=tests/integrations/opentelemetry - potel: TESTPATH=tests/integrations/opentelemetry pure_eval: TESTPATH=tests/integrations/pure_eval pymongo: TESTPATH=tests/integrations/pymongo pyramid: TESTPATH=tests/integrations/pyramid @@ -437,7 +407,6 @@ extras = pymongo: pymongo basepython = - py3.6: python3.6 
py3.7: python3.7 py3.8: python3.8 py3.9: python3.9 diff --git a/scripts/split_tox_gh_actions/templates/test_group.jinja b/scripts/split_tox_gh_actions/templates/test_group.jinja index 901e4808e4..cd2b45805b 100644 --- a/scripts/split_tox_gh_actions/templates/test_group.jinja +++ b/scripts/split_tox_gh_actions/templates/test_group.jinja @@ -6,10 +6,6 @@ fail-fast: false matrix: python-version: [{{ py_versions.get(category)|join(",") }}] - # python3.6 reached EOL and is no longer being supported on - # new versions of hosted runners on Github Actions - # ubuntu-20.04 is the last version that supported python3.6 - # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877 os: [ubuntu-22.04] {% if needs_docker %} @@ -34,17 +30,14 @@ ports: - 5432:5432 env: - SENTRY_PYTHON_TEST_POSTGRES_HOST: {% raw %}${{ matrix.python-version == '3.6' && 'postgres' || 'localhost' }}{% endraw %} + SENTRY_PYTHON_TEST_POSTGRES_HOST: localhost SENTRY_PYTHON_TEST_POSTGRES_USER: postgres SENTRY_PYTHON_TEST_POSTGRES_PASSWORD: sentry {% endif %} - # Use Docker container only for Python 3.6 - {% raw %}container: ${{ matrix.python-version == '3.6' && 'python:3.6' || null }}{% endraw %} steps: - uses: actions/checkout@v4.2.2 - uses: actions/setup-python@v5 - {% raw %}if: ${{ matrix.python-version != '3.6' }}{% endraw %} with: python-version: {% raw %}${{ matrix.python-version }}{% endraw %} allow-prereleases: true @@ -76,15 +69,8 @@ {% endif %} {% endfor %} - - name: Generate coverage XML (Python 3.6) - if: {% raw %}${{ !cancelled() && matrix.python-version == '3.6' }}{% endraw %} - run: | - export COVERAGE_RCFILE=.coveragerc36 - coverage combine .coverage-sentry-* - coverage xml --ignore-errors - - name: Generate coverage XML - if: {% raw %}${{ !cancelled() && matrix.python-version != '3.6' }}{% endraw %} + if: {% raw %}${{ !cancelled() }}{% endraw %} run: | coverage combine .coverage-sentry-* coverage xml diff --git a/sentry_sdk/__init__.py b/sentry_sdk/__init__.py index b4859cc5d2..b35c446dc0 100644 --- a/sentry_sdk/__init__.py +++ b/sentry_sdk/__init__.py @@ -1,4 +1,6 @@ -from sentry_sdk.scope import Scope +# TODO-neel scope switch +# TODO-neel avoid duplication between api and __init__ +from sentry_sdk.opentelemetry.scope import PotelScope as Scope from sentry_sdk.transport import Transport, HttpTransport from sentry_sdk.client import Client @@ -7,7 +9,6 @@ from sentry_sdk.consts import VERSION # noqa __all__ = [ # noqa - "Hub", "Scope", "Client", "Transport", @@ -19,7 +20,6 @@ "capture_event", "capture_exception", "capture_message", - "configure_scope", "continue_trace", "flush", "get_baggage", @@ -33,11 +33,9 @@ "isolation_scope", "last_event_id", "new_scope", - "push_scope", "set_context", "set_extra", "set_level", - "set_measurement", "set_tag", "set_tags", "set_user", @@ -53,6 +51,3 @@ init_debug_support() del init_debug_support - -# circular imports -from sentry_sdk.hub import Hub diff --git a/sentry_sdk/_compat.py b/sentry_sdk/_compat.py index a811cf2120..fc04ed5859 100644 --- a/sentry_sdk/_compat.py +++ b/sentry_sdk/_compat.py @@ -9,7 +9,6 @@ T = TypeVar("T") -PY37 = sys.version_info[0] == 3 and sys.version_info[1] >= 7 PY38 = sys.version_info[0] == 3 and sys.version_info[1] >= 8 PY310 = sys.version_info[0] == 3 and sys.version_info[1] >= 10 PY311 = sys.version_info[0] == 3 and sys.version_info[1] >= 11 diff --git a/sentry_sdk/_init_implementation.py b/sentry_sdk/_init_implementation.py index eb02b3d11e..34e9d071e9 100644 --- a/sentry_sdk/_init_implementation.py +++ 
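The _compat.py hunk above drops the now-unused PY37 flag; the remaining flags are plain booleans computed once at import time. A slightly simplified equivalent (the SDK spells out the major and minor components explicitly):

    import sys

    PY38 = sys.version_info >= (3, 8)
    PY310 = sys.version_info >= (3, 10)
    PY311 = sys.version_info >= (3, 11)

    if PY310:
        # gate 3.10+ behavior, e.g. zip(..., strict=True)
        pairs = list(zip([1, 2], ["a", "b"], strict=True))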
b/sentry_sdk/_init_implementation.py @@ -1,48 +1,11 @@ -import warnings - from typing import TYPE_CHECKING import sentry_sdk +from sentry_sdk.consts import ClientConstructor +from sentry_sdk.opentelemetry.scope import setup_scope_context_management if TYPE_CHECKING: - from typing import Any, ContextManager, Optional - - import sentry_sdk.consts - - -class _InitGuard: - _CONTEXT_MANAGER_DEPRECATION_WARNING_MESSAGE = ( - "Using the return value of sentry_sdk.init as a context manager " - "and manually calling the __enter__ and __exit__ methods on the " - "return value are deprecated. We are no longer maintaining this " - "functionality, and we will remove it in the next major release." - ) - - def __init__(self, client): - # type: (sentry_sdk.Client) -> None - self._client = client - - def __enter__(self): - # type: () -> _InitGuard - warnings.warn( - self._CONTEXT_MANAGER_DEPRECATION_WARNING_MESSAGE, - stacklevel=2, - category=DeprecationWarning, - ) - - return self - - def __exit__(self, exc_type, exc_value, tb): - # type: (Any, Any, Any) -> None - warnings.warn( - self._CONTEXT_MANAGER_DEPRECATION_WARNING_MESSAGE, - stacklevel=2, - category=DeprecationWarning, - ) - - c = self._client - if c is not None: - c.close() + from typing import Any, Optional def _check_python_deprecations(): @@ -54,16 +17,15 @@ def _check_python_deprecations(): def _init(*args, **kwargs): - # type: (*Optional[str], **Any) -> ContextManager[Any] + # type: (*Optional[str], **Any) -> None """Initializes the SDK and optionally integrations. This takes the same arguments as the client constructor. """ + setup_scope_context_management() client = sentry_sdk.Client(*args, **kwargs) sentry_sdk.get_global_scope().set_client(client) _check_python_deprecations() - rv = _InitGuard(client) - return rv if TYPE_CHECKING: @@ -73,7 +35,7 @@ def _init(*args, **kwargs): # Use `ClientConstructor` to define the argument types of `init` and # `ContextManager[Any]` to tell static analyzers about the return type. 
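With _InitGuard gone, sentry_sdk.init() now returns None, so 2.x code that used the return value as a context manager has to flush and close through the client instead. A hedged migration sketch (the empty DSN keeps the demo inert):

    import sentry_sdk

    # 2.x (removed): with sentry_sdk.init(dsn=...): main()

    sentry_sdk.init(dsn="")
    try:
        pass  # application code
    finally:
        # flushes pending envelopes, then shuts the client down
        sentry_sdk.get_client().close(timeout=2.0)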
- class init(sentry_sdk.consts.ClientConstructor, _InitGuard): # noqa: N801 + class init(ClientConstructor): # noqa: N801 pass else: diff --git a/sentry_sdk/_types.py b/sentry_sdk/_types.py index 7da76e63dc..79260e3431 100644 --- a/sentry_sdk/_types.py +++ b/sentry_sdk/_types.py @@ -107,9 +107,7 @@ def substituted_because_contains_sensitive_data(cls): from typing import Callable from typing import Dict from typing import Mapping - from typing import NotRequired from typing import Optional - from typing import Tuple from typing import Type from typing_extensions import Literal, TypedDict @@ -121,45 +119,6 @@ class SDKInfo(TypedDict): # "critical" is an alias of "fatal" recognized by Relay LogLevelStr = Literal["fatal", "critical", "error", "warning", "info", "debug"] - DurationUnit = Literal[ - "nanosecond", - "microsecond", - "millisecond", - "second", - "minute", - "hour", - "day", - "week", - ] - - InformationUnit = Literal[ - "bit", - "byte", - "kilobyte", - "kibibyte", - "megabyte", - "mebibyte", - "gigabyte", - "gibibyte", - "terabyte", - "tebibyte", - "petabyte", - "pebibyte", - "exabyte", - "exbibyte", - ] - - FractionUnit = Literal["ratio", "percent"] - MeasurementUnit = Union[DurationUnit, InformationUnit, FractionUnit, str] - - MeasurementValue = TypedDict( - "MeasurementValue", - { - "value": float, - "unit": NotRequired[Optional[MeasurementUnit]], - }, - ) - Event = TypedDict( "Event", { @@ -181,7 +140,6 @@ class SDKInfo(TypedDict): "level": LogLevelStr, "logentry": Mapping[str, object], "logger": str, - "measurements": dict[str, MeasurementValue], "message": str, "modules": dict[str, str], "monitor_config": Mapping[str, object], @@ -210,7 +168,6 @@ class SDKInfo(TypedDict): "type": Literal["check_in", "transaction"], "user": dict[str, object], "_dropped_spans": int, - "_metrics_summary": dict[str, object], }, total=False, ) @@ -266,7 +223,6 @@ class SDKInfo(TypedDict): "internal", "profile", "profile_chunk", - "metric_bucket", "monitor", "span", "log", @@ -276,26 +232,6 @@ class SDKInfo(TypedDict): ContinuousProfilerMode = Literal["thread", "gevent", "unknown"] ProfilerMode = Union[ContinuousProfilerMode, Literal["sleep"]] - # Type of the metric. - MetricType = Literal["d", "s", "g", "c"] - - # Value of the metric. - MetricValue = Union[int, float, str] - - # Internal representation of tags as a tuple of tuples (this is done in order to allow for the same key to exist - # multiple times). - MetricTagsInternal = Tuple[Tuple[str, str], ...] - - # External representation of tags as a dictionary. - MetricTagValue = Union[str, int, float, None] - MetricTags = Mapping[str, MetricTagValue] - - # Value inside the generator for the metric value. 
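The TYPE_CHECKING block above keeps a long-standing trick: init is redeclared as a class deriving from ClientConstructor so static analyzers see the keyword arguments, while at runtime init remains the plain function. A self-contained sketch of the pattern with illustrative stand-in names:

    from typing import TYPE_CHECKING


    class _InitOptions:
        # keyword arguments described for type checkers only
        def __init__(self, dsn=None, debug=False):
            pass


    def _real_init(*args, **kwargs):
        print("initializing with", kwargs)


    if TYPE_CHECKING:
        class init(_InitOptions):  # noqa: N801
            pass
    else:
        init = _real_init

    init(dsn="https://example.invalid/1", debug=True)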
- FlushedMetricValue = Union[int, float] - - BucketKey = Tuple[MetricType, str, MeasurementUnit, MetricTagsInternal] - MetricMetaKey = Tuple[MetricType, str, MeasurementUnit] - MonitorConfigScheduleType = Literal["crontab", "interval"] MonitorConfigScheduleUnit = Literal[ "year", @@ -331,3 +267,5 @@ class SDKInfo(TypedDict): ) HttpStatusCodeRange = Union[int, Container[int]] + + OtelExtractedSpanData = tuple[str, str, Optional[str], Optional[int], Optional[str]] diff --git a/sentry_sdk/ai/monitoring.py b/sentry_sdk/ai/monitoring.py index 860833b8f5..2b6a1cdf72 100644 --- a/sentry_sdk/ai/monitoring.py +++ b/sentry_sdk/ai/monitoring.py @@ -33,13 +33,15 @@ def sync_wrapped(*args, **kwargs): curr_pipeline = _ai_pipeline_name.get() op = span_kwargs.get("op", "ai.run" if curr_pipeline else "ai.pipeline") - with start_span(name=description, op=op, **span_kwargs) as span: + with start_span( + name=description, op=op, only_if_parent=True, **span_kwargs + ) as span: for k, v in kwargs.pop("sentry_tags", {}).items(): span.set_tag(k, v) for k, v in kwargs.pop("sentry_data", {}).items(): - span.set_data(k, v) + span.set_attribute(k, v) if curr_pipeline: - span.set_data("ai.pipeline.name", curr_pipeline) + span.set_attribute("ai.pipeline.name", curr_pipeline) return f(*args, **kwargs) else: _ai_pipeline_name.set(description) @@ -62,13 +64,15 @@ async def async_wrapped(*args, **kwargs): curr_pipeline = _ai_pipeline_name.get() op = span_kwargs.get("op", "ai.run" if curr_pipeline else "ai.pipeline") - with start_span(name=description, op=op, **span_kwargs) as span: + with start_span( + name=description, op=op, only_if_parent=True, **span_kwargs + ) as span: for k, v in kwargs.pop("sentry_tags", {}).items(): span.set_tag(k, v) for k, v in kwargs.pop("sentry_data", {}).items(): - span.set_data(k, v) + span.set_attribute(k, v) if curr_pipeline: - span.set_data("ai.pipeline.name", curr_pipeline) + span.set_attribute("ai.pipeline.name", curr_pipeline) return await f(*args, **kwargs) else: _ai_pipeline_name.set(description) @@ -100,11 +104,11 @@ def record_token_usage( # type: (Span, Optional[int], Optional[int], Optional[int]) -> None ai_pipeline_name = get_ai_pipeline_name() if ai_pipeline_name: - span.set_data("ai.pipeline.name", ai_pipeline_name) + span.set_attribute("ai.pipeline.name", ai_pipeline_name) if prompt_tokens is not None: - span.set_measurement("ai_prompt_tokens_used", value=prompt_tokens) + span.set_attribute("ai.prompt_tokens.used", prompt_tokens) if completion_tokens is not None: - span.set_measurement("ai_completion_tokens_used", value=completion_tokens) + span.set_attribute("ai.completion_tokens.used", completion_tokens) if ( total_tokens is None and prompt_tokens is not None @@ -112,4 +116,4 @@ def record_token_usage( ): total_tokens = prompt_tokens + completion_tokens if total_tokens is not None: - span.set_measurement("ai_total_tokens_used", total_tokens) + span.set_attribute("ai.total_tokens.used", total_tokens) diff --git a/sentry_sdk/ai/utils.py b/sentry_sdk/ai/utils.py index ed3494f679..5868606940 100644 --- a/sentry_sdk/ai/utils.py +++ b/sentry_sdk/ai/utils.py @@ -29,4 +29,4 @@ def _normalize_data(data): def set_data_normalized(span, key, value): # type: (Span, str, Any) -> None normalized = _normalize_data(value) - span.set_data(key, normalized) + span.set_attribute(key, normalized) diff --git a/sentry_sdk/api.py b/sentry_sdk/api.py index a6b3c293dc..b8a2498d5d 100644 --- a/sentry_sdk/api.py +++ b/sentry_sdk/api.py @@ -1,14 +1,22 @@ import inspect -import warnings from contextlib 
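The ai/monitoring.py changes move pipeline metadata from set_data/set_measurement onto span attributes and only create spans when a parent exists. Usage of the decorator is unchanged; a short sketch (the attribute key in sentry_data is illustrative):

    import sentry_sdk
    from sentry_sdk.ai.monitoring import ai_track

    sentry_sdk.init(dsn="", traces_sample_rate=1.0)


    @ai_track("summarize-document")
    def summarize(text):
        return text[:100]


    # sentry_tags / sentry_data kwargs are popped by the wrapper and
    # recorded as span tags / span attributes respectively.
    summarize("a long document...", sentry_data={"ai.model_id": "example-model"})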
import contextmanager from sentry_sdk import tracing_utils, Client from sentry_sdk._init_implementation import init -from sentry_sdk.consts import INSTRUMENTER -from sentry_sdk.scope import Scope, _ScopeManager, new_scope, isolation_scope -from sentry_sdk.tracing import NoOpSpan, Transaction, trace +from sentry_sdk.tracing import trace from sentry_sdk.crons import monitor +# TODO-neel-potel make 2 scope strategies/impls and switch +from sentry_sdk.scope import Scope as BaseScope +from sentry_sdk.opentelemetry.scope import ( + PotelScope as Scope, + new_scope, + isolation_scope, + use_scope, + use_isolation_scope, +) + + from typing import TYPE_CHECKING if TYPE_CHECKING: @@ -16,36 +24,16 @@ from typing import Any from typing import Dict - from typing import Generator from typing import Optional - from typing import overload from typing import Callable from typing import TypeVar - from typing import ContextManager from typing import Union + from typing import Generator - from typing_extensions import Unpack - - from sentry_sdk.client import BaseClient - from sentry_sdk._types import ( - Event, - Hint, - Breadcrumb, - BreadcrumbHint, - ExcInfo, - MeasurementUnit, - LogLevelStr, - SamplingContext, - ) - from sentry_sdk.tracing import Span, TransactionKwargs + import sentry_sdk T = TypeVar("T") F = TypeVar("F", bound=Callable[..., Any]) -else: - - def overload(x): - # type: (T) -> T - return x # When changing this, update __all__ in __init__.py too @@ -55,7 +43,6 @@ def overload(x): "capture_event", "capture_exception", "capture_message", - "configure_scope", "continue_trace", "flush", "get_baggage", @@ -69,11 +56,9 @@ def overload(x): "isolation_scope", "last_event_id", "new_scope", - "push_scope", "set_context", "set_extra", "set_level", - "set_measurement", "set_tag", "set_tags", "set_user", @@ -81,6 +66,8 @@ def overload(x): "start_transaction", "trace", "monitor", + "use_scope", + "use_isolation_scope", ] @@ -104,7 +91,7 @@ def clientmethod(f): @scopemethod def get_client(): - # type: () -> BaseClient + # type: () -> sentry_sdk.client.BaseClient return Scope.get_client() @@ -124,7 +111,7 @@ def is_initialized(): @scopemethod def get_global_scope(): - # type: () -> Scope + # type: () -> BaseScope return Scope.get_global_scope() @@ -152,8 +139,8 @@ def last_event_id(): @scopemethod def capture_event( - event, # type: Event - hint=None, # type: Optional[Hint] + event, # type: sentry_sdk._types.Event + hint=None, # type: Optional[sentry_sdk._types.Hint] scope=None, # type: Optional[Any] **scope_kwargs, # type: Any ): @@ -164,7 +151,7 @@ def capture_event( @scopemethod def capture_message( message, # type: str - level=None, # type: Optional[LogLevelStr] + level=None, # type: Optional[sentry_sdk._types.LogLevelStr] scope=None, # type: Optional[Any] **scope_kwargs, # type: Any ): @@ -176,7 +163,7 @@ def capture_message( @scopemethod def capture_exception( - error=None, # type: Optional[Union[BaseException, ExcInfo]] + error=None, # type: Optional[Union[BaseException, sentry_sdk._types.ExcInfo]] scope=None, # type: Optional[Any] **scope_kwargs, # type: Any ): @@ -186,109 +173,14 @@ def capture_exception( @scopemethod def add_breadcrumb( - crumb=None, # type: Optional[Breadcrumb] - hint=None, # type: Optional[BreadcrumbHint] + crumb=None, # type: Optional[sentry_sdk._types.Breadcrumb] + hint=None, # type: Optional[sentry_sdk._types.BreadcrumbHint] **kwargs, # type: Any ): # type: (...) 
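capture_event, capture_message, and capture_exception keep their scope_kwargs passthrough, so per-event data can be supplied inline instead of mutating the scope first. For example (empty DSN, so nothing is actually transmitted):

    import sentry_sdk

    sentry_sdk.init(dsn="")

    sentry_sdk.capture_message(
        "payment failed",
        level="warning",
        tags={"payment.provider": "example"},
    )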
-> None return get_isolation_scope().add_breadcrumb(crumb, hint, **kwargs) -@overload -def configure_scope(): - # type: () -> ContextManager[Scope] - pass - - -@overload -def configure_scope( # noqa: F811 - callback, # type: Callable[[Scope], None] -): - # type: (...) -> None - pass - - -def configure_scope( # noqa: F811 - callback=None, # type: Optional[Callable[[Scope], None]] -): - # type: (...) -> Optional[ContextManager[Scope]] - """ - Reconfigures the scope. - - :param callback: If provided, call the callback with the current scope. - - :returns: If no callback is provided, returns a context manager that returns the scope. - """ - warnings.warn( - "sentry_sdk.configure_scope is deprecated and will be removed in the next major version. " - "Please consult our migration guide to learn how to migrate to the new API: " - "https://docs.sentry.io/platforms/python/migration/1.x-to-2.x#scope-configuring", - DeprecationWarning, - stacklevel=2, - ) - - scope = get_isolation_scope() - scope.generate_propagation_context() - - if callback is not None: - # TODO: used to return None when client is None. Check if this changes behavior. - callback(scope) - - return None - - @contextmanager - def inner(): - # type: () -> Generator[Scope, None, None] - yield scope - - return inner() - - -@overload -def push_scope(): - # type: () -> ContextManager[Scope] - pass - - -@overload -def push_scope( # noqa: F811 - callback, # type: Callable[[Scope], None] -): - # type: (...) -> None - pass - - -def push_scope( # noqa: F811 - callback=None, # type: Optional[Callable[[Scope], None]] -): - # type: (...) -> Optional[ContextManager[Scope]] - """ - Pushes a new layer on the scope stack. - - :param callback: If provided, this method pushes a scope, calls - `callback`, and pops the scope again. - - :returns: If no `callback` is provided, a context manager that should - be used to pop the scope again. - """ - warnings.warn( - "sentry_sdk.push_scope is deprecated and will be removed in the next major version. " - "Please consult our migration guide to learn how to migrate to the new API: " - "https://docs.sentry.io/platforms/python/migration/1.x-to-2.x#scope-pushing", - DeprecationWarning, - stacklevel=2, - ) - - if callback is not None: - with warnings.catch_warnings(): - warnings.simplefilter("ignore", DeprecationWarning) - with push_scope() as scope: - callback(scope) - return None - - return _ScopeManager() - - @scopemethod def set_tag(key, value): # type: (str, Any) -> None @@ -321,7 +213,7 @@ def set_user(value): @scopemethod def set_level(value): - # type: (LogLevelStr) -> None + # type: (sentry_sdk._types.LogLevelStr) -> None return get_isolation_scope().set_level(value) @@ -334,23 +226,33 @@ def flush( return get_client().flush(timeout=timeout, callback=callback) -@scopemethod -def start_span( - **kwargs, # type: Any -): - # type: (...) -> Span +def start_span(**kwargs): + # type: (Any) -> sentry_sdk.tracing.Span + """ + Start and return a span. + + This is the entry point to manual tracing instrumentation. + + A tree structure can be built by adding child spans to the span. + To start a new child span within the span, call the `start_child()` method. + + When used as a context manager, spans are automatically finished at the end + of the `with` block. If not using context managers, call the `finish()` + method. 
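With configure_scope and push_scope removed, callers fork scopes explicitly. The closest 3.x equivalent of a push_scope block is new_scope:

    import sentry_sdk

    sentry_sdk.init(dsn="")

    # 2.x (removed): with sentry_sdk.push_scope() as scope: ...
    with sentry_sdk.new_scope() as scope:
        scope.set_tag("section", "checkout")
        sentry_sdk.capture_message("only events in this block carry the tag")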
+ """ return get_current_scope().start_span(**kwargs) -@scopemethod def start_transaction( - transaction=None, # type: Optional[Transaction] - instrumenter=INSTRUMENTER.SENTRY, # type: str - custom_sampling_context=None, # type: Optional[SamplingContext] - **kwargs, # type: Unpack[TransactionKwargs] + transaction=None, # type: Optional[sentry_sdk.tracing.Span] + **kwargs, # type: Any ): - # type: (...) -> Union[Transaction, NoOpSpan] + # type: (...) -> sentry_sdk.tracing.Span """ + .. deprecated:: 3.0.0 + This function is deprecated and will be removed in a future release. + Use :py:meth:`sentry_sdk.start_span` instead. + Start and return a transaction on the current scope. Start an existing transaction if given, otherwise create and start a new @@ -374,31 +276,18 @@ def start_transaction( :param transaction: The transaction to start. If omitted, we create and start a new transaction. - :param instrumenter: This parameter is meant for internal use only. It - will be removed in the next major version. - :param custom_sampling_context: The transaction's custom sampling context. :param kwargs: Optional keyword arguments to be passed to the Transaction constructor. See :py:class:`sentry_sdk.tracing.Transaction` for available arguments. """ - return get_current_scope().start_transaction( - transaction, instrumenter, custom_sampling_context, **kwargs + return start_span( + span=transaction, + **kwargs, ) -def set_measurement(name, value, unit=""): - # type: (str, float, MeasurementUnit) -> None - """ - .. deprecated:: 2.28.0 - This function is deprecated and will be removed in the next major release. - """ - transaction = get_current_scope().transaction - if transaction is not None: - transaction.set_measurement(name, value, unit) - - def get_current_span(scope=None): - # type: (Optional[Scope]) -> Optional[Span] + # type: (Optional[Scope]) -> Optional[sentry_sdk.tracing.Span] """ Returns the currently active span if there is one running, otherwise `None` """ @@ -425,13 +314,11 @@ def get_baggage(): return None -def continue_trace( - environ_or_headers, op=None, name=None, source=None, origin="manual" -): - # type: (Dict[str, Any], Optional[str], Optional[str], Optional[str], str) -> Transaction +@contextmanager +def continue_trace(environ_or_headers): + # type: (Dict[str, Any]) -> Generator[None, None, None] """ - Sets the propagation context from environment or headers and returns a transaction. + Sets the propagation context from environment or headers to continue an incoming trace. 
""" - return get_isolation_scope().continue_trace( - environ_or_headers, op, name, source, origin - ) + with get_isolation_scope().continue_trace(environ_or_headers): + yield diff --git a/sentry_sdk/client.py b/sentry_sdk/client.py index f06166bcc8..2f4349253b 100644 --- a/sentry_sdk/client.py +++ b/sentry_sdk/client.py @@ -6,9 +6,8 @@ from datetime import datetime, timezone from importlib import import_module from typing import TYPE_CHECKING, List, Dict, cast, overload -import warnings -from sentry_sdk._compat import PY37, check_uwsgi_thread_support +from sentry_sdk._compat import check_uwsgi_thread_support from sentry_sdk.utils import ( AnnotatedValue, ContextVar, @@ -20,7 +19,6 @@ get_type_name, get_default_release, handle_in_app, - is_gevent, logger, ) from sentry_sdk.serializer import serialize @@ -30,14 +28,14 @@ SPANDATA, DEFAULT_MAX_VALUE_LENGTH, DEFAULT_OPTIONS, - INSTRUMENTER, VERSION, ClientConstructor, ) -from sentry_sdk.integrations import _DEFAULT_INTEGRATIONS, setup_integrations +from sentry_sdk.integrations import setup_integrations from sentry_sdk.integrations.dedupe import DedupeIntegration from sentry_sdk.sessions import SessionFlusher from sentry_sdk.envelope import Envelope + from sentry_sdk.profiler.continuous_profiler import setup_continuous_profiler from sentry_sdk.profiler.transaction_profiler import ( has_profiling_enabled, @@ -59,7 +57,6 @@ from sentry_sdk._types import Event, Hint, SDKInfo, Log from sentry_sdk.integrations import Integration - from sentry_sdk.metrics import MetricsAggregator from sentry_sdk.scope import Scope from sentry_sdk.session import Session from sentry_sdk.spotlight import SpotlightClient @@ -115,9 +112,6 @@ def _get_options(*args, **kwargs): if rv["server_name"] is None and hasattr(socket, "gethostname"): rv["server_name"] = socket.gethostname() - if rv["instrumenter"] is None: - rv["instrumenter"] = INSTRUMENTER.SENTRY - if rv["project_root"] is None: try: project_root = os.getcwd() @@ -126,9 +120,6 @@ def _get_options(*args, **kwargs): rv["project_root"] = project_root - if rv["enable_tracing"] is True and rv["traces_sample_rate"] is None: - rv["traces_sample_rate"] = 1.0 - if rv["event_scrubber"] is None: rv["event_scrubber"] = EventScrubber( send_default_pii=( @@ -142,24 +133,9 @@ def _get_options(*args, **kwargs): ) rv["socket_options"] = None - if rv["enable_tracing"] is not None: - warnings.warn( - "The `enable_tracing` parameter is deprecated. Please use `traces_sample_rate` instead.", - DeprecationWarning, - stacklevel=2, - ) - return rv -try: - # Python 3.6+ - module_not_found_error = ModuleNotFoundError -except Exception: - # Older Python versions - module_not_found_error = ImportError # type: ignore - - class BaseClient: """ .. 
versionadded:: 2.0.0 @@ -177,7 +153,6 @@ def __init__(self, options=None): self.transport = None # type: Optional[Transport] self.monitor = None # type: Optional[Monitor] - self.metrics_aggregator = None # type: Optional[MetricsAggregator] self.log_batcher = None # type: Optional[LogBatcher] def __getstate__(self, *args, **kwargs): @@ -303,7 +278,7 @@ def _setup_instrumentation(self, functions_to_trace): function_obj = getattr(module_obj, function_name) setattr(module_obj, function_name, trace(function_obj)) logger.debug("Enabled tracing for %s", function_qualname) - except module_not_found_error: + except ModuleNotFoundError: try: # Try to import a class # ex: "mymodule.submodule.MyClassName.member_function" @@ -356,26 +331,7 @@ def _capture_envelope(envelope): self.session_flusher = SessionFlusher(capture_func=_capture_envelope) - self.metrics_aggregator = None # type: Optional[MetricsAggregator] experiments = self.options.get("_experiments", {}) - if experiments.get("enable_metrics", True): - # Context vars are not working correctly on Python <=3.6 - # with gevent. - metrics_supported = not is_gevent() or PY37 - if metrics_supported: - from sentry_sdk.metrics import MetricsAggregator - - self.metrics_aggregator = MetricsAggregator( - capture_func=_capture_envelope, - enable_code_locations=bool( - experiments.get("metric_code_locations", True) - ), - ) - else: - logger.info( - "Metrics not supported on Python 3.6 and lower with gevent." - ) - self.log_batcher = None if experiments.get("enable_logs", False): from sentry_sdk._log_batcher import LogBatcher @@ -390,19 +346,6 @@ def _capture_envelope(envelope): ) ) - if self.options["_experiments"].get("otel_powered_performance", False): - logger.debug( - "[OTel] Enabling experimental OTel-powered performance monitoring." - ) - self.options["instrumenter"] = INSTRUMENTER.OTEL - if ( - "sentry_sdk.integrations.opentelemetry.integration.OpenTelemetryIntegration" - not in _DEFAULT_INTEGRATIONS - ): - _DEFAULT_INTEGRATIONS.append( - "sentry_sdk.integrations.opentelemetry.integration.OpenTelemetryIntegration", - ) - self.integrations = setup_integrations( self.options["integrations"], with_defaults=self.options["default_integrations"], @@ -450,6 +393,13 @@ def _capture_envelope(envelope): except Exception as e: logger.debug("Can not set up continuous profiler. 
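Dropping enable_tracing also removes the implicit traces_sample_rate=1.0 it used to set; tracing must now be opted into directly:

    import sentry_sdk

    # 2.x: sentry_sdk.init(dsn=..., enable_tracing=True)
    # 3.x: set the sample rate (or a traces_sampler) explicitly.
    sentry_sdk.init(
        dsn="",                  # placeholder for this sketch
        traces_sample_rate=1.0,  # sample every trace
    )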
(%s)", e) + from sentry_sdk.opentelemetry.tracing import ( + patch_readable_span, + setup_sentry_tracing, + ) + + patch_readable_span() + setup_sentry_tracing() finally: _client_init_debug.set(old_debug) @@ -457,7 +407,6 @@ def _capture_envelope(envelope): if ( self.monitor - or self.metrics_aggregator or self.log_batcher or has_profiling_enabled(self.options) or isinstance(self.transport, BaseHttpTransport) @@ -524,7 +473,7 @@ def _prepare_event( ) return None - event = event_ + event = event_ # type: Optional[Event] # type: ignore[no-redef] spans_delta = spans_before - len( cast(List[Dict[str, object]], event.get("spans", [])) ) @@ -621,7 +570,7 @@ def _prepare_event( and event is not None and event.get("type") != "transaction" ): - new_event = None + new_event = None # type: Optional[Event] with capture_internal_exceptions(): new_event = before_send(event, hint or {}) if new_event is None: @@ -638,7 +587,7 @@ def _prepare_event( if event.get("exception"): DedupeIntegration.reset_last_seen() - event = new_event + event = new_event # type: Optional[Event] # type: ignore[no-redef] before_send_transaction = self.options["before_send_transaction"] if ( @@ -662,13 +611,15 @@ def _prepare_event( quantity=spans_before + 1, # +1 for the transaction itself ) else: - spans_delta = spans_before - len(new_event.get("spans", [])) + spans_delta = spans_before - len( + cast(List[Dict[str, object]], new_event.get("spans", [])) + ) if spans_delta > 0 and self.transport is not None: self.transport.record_lost_event( reason="before_send", data_category="span", quantity=spans_delta ) - event = new_event + event = new_event # type: Optional[Event] # type: ignore[no-redef] return event @@ -915,7 +866,7 @@ def _capture_experimental_log(self, current_scope, log): log["attributes"]["sentry.trace.parent_span_id"] = span.span_id if log.get("trace_id") is None: - transaction = current_scope.transaction + transaction = current_scope.root_span propagation_context = isolation_scope.get_active_propagation_context() if transaction is not None: log["trace_id"] = transaction.trace_id @@ -987,13 +938,15 @@ def close( """ if self.transport is not None: self.flush(timeout=timeout, callback=callback) + self.session_flusher.kill() - if self.metrics_aggregator is not None: - self.metrics_aggregator.kill() + if self.log_batcher is not None: self.log_batcher.kill() + if self.monitor: self.monitor.kill() + self.transport.kill() self.transport = None @@ -1014,10 +967,10 @@ def flush( if timeout is None: timeout = self.options["shutdown_timeout"] self.session_flusher.flush() - if self.metrics_aggregator is not None: - self.metrics_aggregator.flush() + if self.log_batcher is not None: self.log_batcher.flush() + self.transport.flush(timeout=timeout, callback=callback) def __enter__(self): diff --git a/sentry_sdk/consts.py b/sentry_sdk/consts.py index e1f18fe4ae..98d98a72ae 100644 --- a/sentry_sdk/consts.py +++ b/sentry_sdk/consts.py @@ -47,12 +47,9 @@ class CompressionAlgo(Enum): Event, EventProcessor, Hint, - MeasurementUnit, ProfilerMode, TracesSampler, TransactionProcessor, - MetricTags, - MetricValue, ) # Experiments are feature flags to enable and disable certain unstable SDK @@ -73,11 +70,6 @@ class CompressionAlgo(Enum): "transport_compression_algo": Optional[CompressionAlgo], "transport_num_pools": Optional[int], "transport_http2": Optional[bool], - "enable_metrics": Optional[bool], - "before_emit_metric": Optional[ - Callable[[str, MetricValue, MeasurementUnit, MetricTags], bool] - ], - "metric_code_locations": 
Optional[bool], "enable_logs": Optional[bool], }, total=False, @@ -96,11 +88,6 @@ class CompressionAlgo(Enum): ] -class INSTRUMENTER: - SENTRY = "sentry" - OTEL = "otel" - - class SPANDATA: """ Additional information describing the type of the span. @@ -174,7 +161,7 @@ class SPANDATA: AI_TOOL_CALLS = "ai.tool_calls" """ - For an AI model call, the function that was called. This is deprecated for OpenAI, and replaced by tool_calls + For an AI model call, the function that was called. """ AI_TOOLS = "ai.tools" @@ -491,6 +478,46 @@ class OP: SOCKET_DNS = "socket.dns" +BAGGAGE_HEADER_NAME = "baggage" +SENTRY_TRACE_HEADER_NAME = "sentry-trace" + +DEFAULT_SPAN_ORIGIN = "manual" +DEFAULT_SPAN_NAME = "" + + +# Transaction source +# see https://develop.sentry.dev/sdk/event-payloads/transaction/#transaction-annotations +class TransactionSource(str, Enum): + COMPONENT = "component" + CUSTOM = "custom" + ROUTE = "route" + TASK = "task" + URL = "url" + VIEW = "view" + + def __str__(self): + # type: () -> str + return self.value + + +# These are typically high cardinality and the server hates them +LOW_QUALITY_TRANSACTION_SOURCES = [ + TransactionSource.URL, +] + +SOURCE_FOR_STYLE = { + "endpoint": TransactionSource.COMPONENT, + "function_name": TransactionSource.COMPONENT, + "handler_name": TransactionSource.COMPONENT, + "method_and_path_pattern": TransactionSource.ROUTE, + "path": TransactionSource.URL, + "route_name": TransactionSource.COMPONENT, + "route_pattern": TransactionSource.ROUTE, + "uri_template": TransactionSource.ROUTE, + "url": TransactionSource.ROUTE, +} + + # This type exists to trick mypy and PyCharm into thinking `init` and `Client` # take these arguments (even though they take opaque **kwargs) class ClientConstructor: @@ -524,7 +551,6 @@ def __init__( debug=None, # type: Optional[bool] attach_stacktrace=False, # type: bool ca_certs=None, # type: Optional[str] - propagate_traces=True, # type: bool traces_sample_rate=None, # type: Optional[float] traces_sampler=None, # type: Optional[TracesSampler] profiles_sample_rate=None, # type: Optional[float] @@ -538,10 +564,8 @@ def __init__( send_client_reports=True, # type: bool _experiments={}, # type: Experiments # noqa: B006 proxy_headers=None, # type: Optional[Dict[str, str]] - instrumenter=INSTRUMENTER.SENTRY, # type: Optional[str] before_send_transaction=None, # type: Optional[TransactionProcessor] project_root=None, # type: Optional[str] - enable_tracing=None, # type: Optional[bool] include_local_variables=True, # type: Optional[bool] include_source_context=True, # type: Optional[bool] trace_propagation_targets=[ # noqa: B006 @@ -930,11 +954,6 @@ def __init__( :param profile_session_sample_rate: - - :param enable_tracing: - - :param propagate_traces: - :param auto_session_tracking: :param spotlight: @@ -966,4 +985,4 @@ def _get_default_options(): del _get_default_options -VERSION = "2.27.0" +VERSION = "3.0.0" diff --git a/sentry_sdk/debug.py b/sentry_sdk/debug.py index e4c686a3e8..c0c30fdd5d 100644 --- a/sentry_sdk/debug.py +++ b/sentry_sdk/debug.py @@ -1,6 +1,5 @@ import sys import logging -import warnings from sentry_sdk import get_client from sentry_sdk.client import _client_init_debug @@ -30,12 +29,3 @@ def configure_logger(): logger.addHandler(_handler) logger.setLevel(logging.DEBUG) logger.addFilter(_DebugFilter()) - - -def configure_debug_hub(): - # type: () -> None - warnings.warn( - "configure_debug_hub is deprecated. 
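TransactionSource subclasses str, so enum members compare equal to the plain strings that existing call sites pass around, and the __str__ override keeps serialization stable. A trimmed demonstration of the mix-in behavior:

    from enum import Enum


    class TransactionSource(str, Enum):
        ROUTE = "route"
        URL = "url"

        def __str__(self):
            return self.value


    assert TransactionSource.ROUTE == "route"  # str mix-in equality
    assert str(TransactionSource.URL) == "url"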
Please remove calls to it, as it is a no-op.", - DeprecationWarning, - stacklevel=2, - ) diff --git a/sentry_sdk/envelope.py b/sentry_sdk/envelope.py index 5f7220bf21..378028377b 100644 --- a/sentry_sdk/envelope.py +++ b/sentry_sdk/envelope.py @@ -280,8 +280,6 @@ def data_category(self): return "profile" elif ty == "profile_chunk": return "profile_chunk" - elif ty == "statsd": - return "metric_bucket" elif ty == "check_in": return "monitor" else: @@ -341,7 +339,7 @@ def deserialize_from( # if no length was specified we need to read up to the end of line # and remove it (if it is present, i.e. not the very last char in an eof terminated envelope) payload = f.readline().rstrip(b"\n") - if headers.get("type") in ("event", "transaction", "metric_buckets"): + if headers.get("type") in ("event", "transaction"): rv = cls(headers=headers, payload=PayloadRef(json=parse_json(payload))) else: rv = cls(headers=headers, payload=payload) diff --git a/sentry_sdk/feature_flags.py b/sentry_sdk/feature_flags.py index dd8d41c32e..ea551edd20 100644 --- a/sentry_sdk/feature_flags.py +++ b/sentry_sdk/feature_flags.py @@ -69,4 +69,4 @@ def add_feature_flag(flag, result): span = sentry_sdk.get_current_span() if span: - span.set_flag(f"flag.evaluation.{flag}", result) + span.set_flag(flag, result) diff --git a/sentry_sdk/hub.py b/sentry_sdk/hub.py deleted file mode 100644 index 7fda9202df..0000000000 --- a/sentry_sdk/hub.py +++ /dev/null @@ -1,739 +0,0 @@ -import warnings -from contextlib import contextmanager - -from sentry_sdk import ( - get_client, - get_global_scope, - get_isolation_scope, - get_current_scope, -) -from sentry_sdk._compat import with_metaclass -from sentry_sdk.consts import INSTRUMENTER -from sentry_sdk.scope import _ScopeManager -from sentry_sdk.client import Client -from sentry_sdk.tracing import ( - NoOpSpan, - Span, - Transaction, -) - -from sentry_sdk.utils import ( - logger, - ContextVar, -) - -from typing import TYPE_CHECKING - -if TYPE_CHECKING: - from typing import Any - from typing import Callable - from typing import ContextManager - from typing import Dict - from typing import Generator - from typing import List - from typing import Optional - from typing import overload - from typing import Tuple - from typing import Type - from typing import TypeVar - from typing import Union - - from typing_extensions import Unpack - - from sentry_sdk.scope import Scope - from sentry_sdk.client import BaseClient - from sentry_sdk.integrations import Integration - from sentry_sdk._types import ( - Event, - Hint, - Breadcrumb, - BreadcrumbHint, - ExcInfo, - LogLevelStr, - SamplingContext, - ) - from sentry_sdk.tracing import TransactionKwargs - - T = TypeVar("T") - -else: - - def overload(x): - # type: (T) -> T - return x - - -class SentryHubDeprecationWarning(DeprecationWarning): - """ - A custom deprecation warning to inform users that the Hub is deprecated. - """ - - _MESSAGE = ( - "`sentry_sdk.Hub` is deprecated and will be removed in a future major release. 
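The feature_flags change records the evaluation on the span under the bare flag name; judging by the call-site change, the flag.evaluation. namespacing now happens inside set_flag itself. Typical usage is unchanged:

    import sentry_sdk
    from sentry_sdk.feature_flags import add_feature_flag

    sentry_sdk.init(dsn="", traces_sample_rate=1.0)

    with sentry_sdk.start_span(name="checkout", op="function"):
        # buffered on the scope and, when a span is active, set on it too
        add_feature_flag("new-checkout-flow", True)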
" - "Please consult our 1.x to 2.x migration guide for details on how to migrate " - "`Hub` usage to the new API: " - "https://docs.sentry.io/platforms/python/migration/1.x-to-2.x" - ) - - def __init__(self, *_): - # type: (*object) -> None - super().__init__(self._MESSAGE) - - -@contextmanager -def _suppress_hub_deprecation_warning(): - # type: () -> Generator[None, None, None] - """Utility function to suppress deprecation warnings for the Hub.""" - with warnings.catch_warnings(): - warnings.filterwarnings("ignore", category=SentryHubDeprecationWarning) - yield - - -_local = ContextVar("sentry_current_hub") - - -class HubMeta(type): - @property - def current(cls): - # type: () -> Hub - """Returns the current instance of the hub.""" - warnings.warn(SentryHubDeprecationWarning(), stacklevel=2) - rv = _local.get(None) - if rv is None: - with _suppress_hub_deprecation_warning(): - # This will raise a deprecation warning; suppress it since we already warned above. - rv = Hub(GLOBAL_HUB) - _local.set(rv) - return rv - - @property - def main(cls): - # type: () -> Hub - """Returns the main instance of the hub.""" - warnings.warn(SentryHubDeprecationWarning(), stacklevel=2) - return GLOBAL_HUB - - -class Hub(with_metaclass(HubMeta)): # type: ignore - """ - .. deprecated:: 2.0.0 - The Hub is deprecated. Its functionality will be merged into :py:class:`sentry_sdk.scope.Scope`. - - The hub wraps the concurrency management of the SDK. Each thread has - its own hub but the hub might transfer with the flow of execution if - context vars are available. - - If the hub is used with a with statement it's temporarily activated. - """ - - _stack = None # type: List[Tuple[Optional[Client], Scope]] - _scope = None # type: Optional[Scope] - - # Mypy doesn't pick up on the metaclass. - - if TYPE_CHECKING: - current = None # type: Hub - main = None # type: Hub - - def __init__( - self, - client_or_hub=None, # type: Optional[Union[Hub, Client]] - scope=None, # type: Optional[Any] - ): - # type: (...) 
-> None - warnings.warn(SentryHubDeprecationWarning(), stacklevel=2) - - current_scope = None - - if isinstance(client_or_hub, Hub): - client = get_client() - if scope is None: - # hub cloning is going on, we use a fork of the current/isolation scope for context manager - scope = get_isolation_scope().fork() - current_scope = get_current_scope().fork() - else: - client = client_or_hub # type: ignore - get_global_scope().set_client(client) - - if scope is None: # so there is no Hub cloning going on - # just the current isolation scope is used for context manager - scope = get_isolation_scope() - current_scope = get_current_scope() - - if current_scope is None: - # just the current current scope is used for context manager - current_scope = get_current_scope() - - self._stack = [(client, scope)] # type: ignore - self._last_event_id = None # type: Optional[str] - self._old_hubs = [] # type: List[Hub] - - self._old_current_scopes = [] # type: List[Scope] - self._old_isolation_scopes = [] # type: List[Scope] - self._current_scope = current_scope # type: Scope - self._scope = scope # type: Scope - - def __enter__(self): - # type: () -> Hub - self._old_hubs.append(Hub.current) - _local.set(self) - - current_scope = get_current_scope() - self._old_current_scopes.append(current_scope) - scope._current_scope.set(self._current_scope) - - isolation_scope = get_isolation_scope() - self._old_isolation_scopes.append(isolation_scope) - scope._isolation_scope.set(self._scope) - - return self - - def __exit__( - self, - exc_type, # type: Optional[type] - exc_value, # type: Optional[BaseException] - tb, # type: Optional[Any] - ): - # type: (...) -> None - old = self._old_hubs.pop() - _local.set(old) - - old_current_scope = self._old_current_scopes.pop() - scope._current_scope.set(old_current_scope) - - old_isolation_scope = self._old_isolation_scopes.pop() - scope._isolation_scope.set(old_isolation_scope) - - def run( - self, callback # type: Callable[[], T] - ): - # type: (...) -> T - """ - .. deprecated:: 2.0.0 - This function is deprecated and will be removed in a future release. - - Runs a callback in the context of the hub. Alternatively the - with statement can be used on the hub directly. - """ - with self: - return callback() - - def get_integration( - self, name_or_class # type: Union[str, Type[Integration]] - ): - # type: (...) -> Any - """ - .. deprecated:: 2.0.0 - This function is deprecated and will be removed in a future release. - Please use :py:meth:`sentry_sdk.client._Client.get_integration` instead. - - Returns the integration for this hub by name or class. If there - is no client bound or the client does not have that integration - then `None` is returned. - - If the return value is not `None` the hub is guaranteed to have a - client attached. - """ - return get_client().get_integration(name_or_class) - - @property - def client(self): - # type: () -> Optional[BaseClient] - """ - .. deprecated:: 2.0.0 - This property is deprecated and will be removed in a future release. - Please use :py:func:`sentry_sdk.api.get_client` instead. - - Returns the current client on the hub. - """ - client = get_client() - - if not client.is_active(): - return None - - return client - - @property - def scope(self): - # type: () -> Scope - """ - .. deprecated:: 2.0.0 - This property is deprecated and will be removed in a future release. - Returns the current scope on the hub. - """ - return get_isolation_scope() - - def last_event_id(self): - # type: () -> Optional[str] - """ - Returns the last event ID. - - .. 
deprecated:: 1.40.5 - This function is deprecated and will be removed in a future release. The functions `capture_event`, `capture_message`, and `capture_exception` return the event ID directly. - """ - logger.warning( - "Deprecated: last_event_id is deprecated. This will be removed in the future. The functions `capture_event`, `capture_message`, and `capture_exception` return the event ID directly." - ) - return self._last_event_id - - def bind_client( - self, new # type: Optional[BaseClient] - ): - # type: (...) -> None - """ - .. deprecated:: 2.0.0 - This function is deprecated and will be removed in a future release. - Please use :py:meth:`sentry_sdk.Scope.set_client` instead. - - Binds a new client to the hub. - """ - get_global_scope().set_client(new) - - def capture_event(self, event, hint=None, scope=None, **scope_kwargs): - # type: (Event, Optional[Hint], Optional[Scope], Any) -> Optional[str] - """ - .. deprecated:: 2.0.0 - This function is deprecated and will be removed in a future release. - Please use :py:meth:`sentry_sdk.Scope.capture_event` instead. - - Captures an event. - - Alias of :py:meth:`sentry_sdk.Scope.capture_event`. - - :param event: A ready-made event that can be directly sent to Sentry. - - :param hint: Contains metadata about the event that can be read from `before_send`, such as the original exception object or a HTTP request object. - - :param scope: An optional :py:class:`sentry_sdk.Scope` to apply to events. - The `scope` and `scope_kwargs` parameters are mutually exclusive. - - :param scope_kwargs: Optional data to apply to event. - For supported `**scope_kwargs` see :py:meth:`sentry_sdk.Scope.update_from_kwargs`. - The `scope` and `scope_kwargs` parameters are mutually exclusive. - """ - last_event_id = get_current_scope().capture_event( - event, hint, scope=scope, **scope_kwargs - ) - - is_transaction = event.get("type") == "transaction" - if last_event_id is not None and not is_transaction: - self._last_event_id = last_event_id - - return last_event_id - - def capture_message(self, message, level=None, scope=None, **scope_kwargs): - # type: (str, Optional[LogLevelStr], Optional[Scope], Any) -> Optional[str] - """ - .. deprecated:: 2.0.0 - This function is deprecated and will be removed in a future release. - Please use :py:meth:`sentry_sdk.Scope.capture_message` instead. - - Captures a message. - - Alias of :py:meth:`sentry_sdk.Scope.capture_message`. - - :param message: The string to send as the message to Sentry. - - :param level: If no level is provided, the default level is `info`. - - :param scope: An optional :py:class:`sentry_sdk.Scope` to apply to events. - The `scope` and `scope_kwargs` parameters are mutually exclusive. - - :param scope_kwargs: Optional data to apply to event. - For supported `**scope_kwargs` see :py:meth:`sentry_sdk.Scope.update_from_kwargs`. - The `scope` and `scope_kwargs` parameters are mutually exclusive. - - :returns: An `event_id` if the SDK decided to send the event (see :py:meth:`sentry_sdk.client._Client.capture_event`). - """ - last_event_id = get_current_scope().capture_message( - message, level=level, scope=scope, **scope_kwargs - ) - - if last_event_id is not None: - self._last_event_id = last_event_id - - return last_event_id - - def capture_exception(self, error=None, scope=None, **scope_kwargs): - # type: (Optional[Union[BaseException, ExcInfo]], Optional[Scope], Any) -> Optional[str] - """ - .. deprecated:: 2.0.0 - This function is deprecated and will be removed in a future release. 
- Please use :py:meth:`sentry_sdk.Scope.capture_exception` instead. - - Captures an exception. - - Alias of :py:meth:`sentry_sdk.Scope.capture_exception`. - - :param error: An exception to capture. If `None`, `sys.exc_info()` will be used. - - :param scope: An optional :py:class:`sentry_sdk.Scope` to apply to events. - The `scope` and `scope_kwargs` parameters are mutually exclusive. - - :param scope_kwargs: Optional data to apply to event. - For supported `**scope_kwargs` see :py:meth:`sentry_sdk.Scope.update_from_kwargs`. - The `scope` and `scope_kwargs` parameters are mutually exclusive. - - :returns: An `event_id` if the SDK decided to send the event (see :py:meth:`sentry_sdk.client._Client.capture_event`). - """ - last_event_id = get_current_scope().capture_exception( - error, scope=scope, **scope_kwargs - ) - - if last_event_id is not None: - self._last_event_id = last_event_id - - return last_event_id - - def add_breadcrumb(self, crumb=None, hint=None, **kwargs): - # type: (Optional[Breadcrumb], Optional[BreadcrumbHint], Any) -> None - """ - .. deprecated:: 2.0.0 - This function is deprecated and will be removed in a future release. - Please use :py:meth:`sentry_sdk.Scope.add_breadcrumb` instead. - - Adds a breadcrumb. - - :param crumb: Dictionary with the data as the sentry v7/v8 protocol expects. - - :param hint: An optional value that can be used by `before_breadcrumb` - to customize the breadcrumbs that are emitted. - """ - get_isolation_scope().add_breadcrumb(crumb, hint, **kwargs) - - def start_span(self, instrumenter=INSTRUMENTER.SENTRY, **kwargs): - # type: (str, Any) -> Span - """ - .. deprecated:: 2.0.0 - This function is deprecated and will be removed in a future release. - Please use :py:meth:`sentry_sdk.Scope.start_span` instead. - - Start a span whose parent is the currently active span or transaction, if any. - - The return value is a :py:class:`sentry_sdk.tracing.Span` instance, - typically used as a context manager to start and stop timing in a `with` - block. - - Only spans contained in a transaction are sent to Sentry. Most - integrations start a transaction at the appropriate time, for example - for every incoming HTTP request. Use - :py:meth:`sentry_sdk.start_transaction` to start a new transaction when - one is not already in progress. - - For supported `**kwargs` see :py:class:`sentry_sdk.tracing.Span`. - """ - scope = get_current_scope() - return scope.start_span(instrumenter=instrumenter, **kwargs) - - def start_transaction( - self, - transaction=None, - instrumenter=INSTRUMENTER.SENTRY, - custom_sampling_context=None, - **kwargs - ): - # type: (Optional[Transaction], str, Optional[SamplingContext], Unpack[TransactionKwargs]) -> Union[Transaction, NoOpSpan] - """ - .. deprecated:: 2.0.0 - This function is deprecated and will be removed in a future release. - Please use :py:meth:`sentry_sdk.Scope.start_transaction` instead. - - Start and return a transaction. - - Start an existing transaction if given, otherwise create and start a new - transaction with kwargs. - - This is the entry point to manual tracing instrumentation. - - A tree structure can be built by adding child spans to the transaction, - and child spans to other spans. To start a new child span within the - transaction or any span, call the respective `.start_child()` method. - - Every child span must be finished before the transaction is finished, - otherwise the unfinished spans are discarded. 
- - When used as context managers, spans and transactions are automatically - finished at the end of the `with` block. If not using context managers, - call the `.finish()` method. - - When the transaction is finished, it will be sent to Sentry with all its - finished child spans. - - For supported `**kwargs` see :py:class:`sentry_sdk.tracing.Transaction`. - """ - scope = get_current_scope() - - # For backwards compatibility, we allow passing the scope as the hub. - # We need a major release to make this nice. (if someone searches the code: deprecated) - # Type checking disabled for this line because deprecated keys are not allowed in the type signature. - kwargs["hub"] = scope # type: ignore - - return scope.start_transaction( - transaction, instrumenter, custom_sampling_context, **kwargs - ) - - def continue_trace(self, environ_or_headers, op=None, name=None, source=None): - # type: (Dict[str, Any], Optional[str], Optional[str], Optional[str]) -> Transaction - """ - .. deprecated:: 2.0.0 - This function is deprecated and will be removed in a future release. - Please use :py:meth:`sentry_sdk.Scope.continue_trace` instead. - - Sets the propagation context from environment or headers and returns a transaction. - """ - return get_isolation_scope().continue_trace( - environ_or_headers=environ_or_headers, op=op, name=name, source=source - ) - - @overload - def push_scope( - self, callback=None # type: Optional[None] - ): - # type: (...) -> ContextManager[Scope] - pass - - @overload - def push_scope( # noqa: F811 - self, callback # type: Callable[[Scope], None] - ): - # type: (...) -> None - pass - - def push_scope( # noqa - self, - callback=None, # type: Optional[Callable[[Scope], None]] - continue_trace=True, # type: bool - ): - # type: (...) -> Optional[ContextManager[Scope]] - """ - .. deprecated:: 2.0.0 - This function is deprecated and will be removed in a future release. - - Pushes a new layer on the scope stack. - - :param callback: If provided, this method pushes a scope, calls - `callback`, and pops the scope again. - - :returns: If no `callback` is provided, a context manager that should - be used to pop the scope again. - """ - if callback is not None: - with self.push_scope() as scope: - callback(scope) - return None - - return _ScopeManager(self) - - def pop_scope_unsafe(self): - # type: () -> Tuple[Optional[Client], Scope] - """ - .. deprecated:: 2.0.0 - This function is deprecated and will be removed in a future release. - - Pops a scope layer from the stack. - - Try to use the context manager :py:meth:`push_scope` instead. - """ - rv = self._stack.pop() - assert self._stack, "stack must have at least one layer" - return rv - - @overload - def configure_scope( - self, callback=None # type: Optional[None] - ): - # type: (...) -> ContextManager[Scope] - pass - - @overload - def configure_scope( # noqa: F811 - self, callback # type: Callable[[Scope], None] - ): - # type: (...) -> None - pass - - def configure_scope( # noqa - self, - callback=None, # type: Optional[Callable[[Scope], None]] - continue_trace=True, # type: bool - ): - # type: (...) -> Optional[ContextManager[Scope]] - """ - .. deprecated:: 2.0.0 - This function is deprecated and will be removed in a future release. - - Reconfigures the scope. - - :param callback: If provided, call the callback with the current scope. - - :returns: If no callback is provided, returns a context manager that returns the scope. 
- """ - scope = get_isolation_scope() - - if continue_trace: - scope.generate_propagation_context() - - if callback is not None: - # TODO: used to return None when client is None. Check if this changes behavior. - callback(scope) - - return None - - @contextmanager - def inner(): - # type: () -> Generator[Scope, None, None] - yield scope - - return inner() - - def start_session( - self, session_mode="application" # type: str - ): - # type: (...) -> None - """ - .. deprecated:: 2.0.0 - This function is deprecated and will be removed in a future release. - Please use :py:meth:`sentry_sdk.Scope.start_session` instead. - - Starts a new session. - """ - get_isolation_scope().start_session( - session_mode=session_mode, - ) - - def end_session(self): - # type: (...) -> None - """ - .. deprecated:: 2.0.0 - This function is deprecated and will be removed in a future release. - Please use :py:meth:`sentry_sdk.Scope.end_session` instead. - - Ends the current session if there is one. - """ - get_isolation_scope().end_session() - - def stop_auto_session_tracking(self): - # type: (...) -> None - """ - .. deprecated:: 2.0.0 - This function is deprecated and will be removed in a future release. - Please use :py:meth:`sentry_sdk.Scope.stop_auto_session_tracking` instead. - - Stops automatic session tracking. - - This temporarily session tracking for the current scope when called. - To resume session tracking call `resume_auto_session_tracking`. - """ - get_isolation_scope().stop_auto_session_tracking() - - def resume_auto_session_tracking(self): - # type: (...) -> None - """ - .. deprecated:: 2.0.0 - This function is deprecated and will be removed in a future release. - Please use :py:meth:`sentry_sdk.Scope.resume_auto_session_tracking` instead. - - Resumes automatic session tracking for the current scope if - disabled earlier. This requires that generally automatic session - tracking is enabled. - """ - get_isolation_scope().resume_auto_session_tracking() - - def flush( - self, - timeout=None, # type: Optional[float] - callback=None, # type: Optional[Callable[[int, float], None]] - ): - # type: (...) -> None - """ - .. deprecated:: 2.0.0 - This function is deprecated and will be removed in a future release. - Please use :py:meth:`sentry_sdk.client._Client.flush` instead. - - Alias for :py:meth:`sentry_sdk.client._Client.flush` - """ - return get_client().flush(timeout=timeout, callback=callback) - - def get_traceparent(self): - # type: () -> Optional[str] - """ - .. deprecated:: 2.0.0 - This function is deprecated and will be removed in a future release. - Please use :py:meth:`sentry_sdk.Scope.get_traceparent` instead. - - Returns the traceparent either from the active span or from the scope. - """ - current_scope = get_current_scope() - traceparent = current_scope.get_traceparent() - - if traceparent is None: - isolation_scope = get_isolation_scope() - traceparent = isolation_scope.get_traceparent() - - return traceparent - - def get_baggage(self): - # type: () -> Optional[str] - """ - .. deprecated:: 2.0.0 - This function is deprecated and will be removed in a future release. - Please use :py:meth:`sentry_sdk.Scope.get_baggage` instead. - - Returns Baggage either from the active span or from the scope. 
- """ - current_scope = get_current_scope() - baggage = current_scope.get_baggage() - - if baggage is None: - isolation_scope = get_isolation_scope() - baggage = isolation_scope.get_baggage() - - if baggage is not None: - return baggage.serialize() - - return None - - def iter_trace_propagation_headers(self, span=None): - # type: (Optional[Span]) -> Generator[Tuple[str, str], None, None] - """ - .. deprecated:: 2.0.0 - This function is deprecated and will be removed in a future release. - Please use :py:meth:`sentry_sdk.Scope.iter_trace_propagation_headers` instead. - - Return HTTP headers which allow propagation of trace data. Data taken - from the span representing the request, if available, or the current - span on the scope if not. - """ - return get_current_scope().iter_trace_propagation_headers( - span=span, - ) - - def trace_propagation_meta(self, span=None): - # type: (Optional[Span]) -> str - """ - .. deprecated:: 2.0.0 - This function is deprecated and will be removed in a future release. - Please use :py:meth:`sentry_sdk.Scope.trace_propagation_meta` instead. - - Return meta tags which should be injected into HTML templates - to allow propagation of trace information. - """ - if span is not None: - logger.warning( - "The parameter `span` in trace_propagation_meta() is deprecated and will be removed in the future." - ) - - return get_current_scope().trace_propagation_meta( - span=span, - ) - - -with _suppress_hub_deprecation_warning(): - # Suppress deprecation warning for the Hub here, since we still always - # import this module. - GLOBAL_HUB = Hub() -_local.set(GLOBAL_HUB) - - -# Circular imports -from sentry_sdk import scope diff --git a/sentry_sdk/integrations/__init__.py b/sentry_sdk/integrations/__init__.py index 118289950c..f2d1a28522 100644 --- a/sentry_sdk/integrations/__init__.py +++ b/sentry_sdk/integrations/__init__.py @@ -131,10 +131,11 @@ def iter_default_integrations(with_auto_enabling_integrations): "celery": (4, 4, 7), "chalice": (1, 16, 0), "clickhouse_driver": (0, 2, 0), + "common": (1, 4, 0), # opentelemetry-sdk "cohere": (5, 4, 0), - "django": (1, 8), + "django": (2, 0), "dramatiq": (1, 9), - "falcon": (1, 4), + "falcon": (3, 0), "fastapi": (0, 79, 0), "flask": (1, 1, 4), "gql": (3, 4, 1), @@ -157,6 +158,7 @@ def iter_default_integrations(with_auto_enabling_integrations): "statsig": (0, 55, 3), "strawberry": (0, 209, 5), "tornado": (6, 0), + "trytond": (5, 0), "typer": (0, 15), "unleash": (6, 0, 1), } diff --git a/sentry_sdk/integrations/_asgi_common.py b/sentry_sdk/integrations/_asgi_common.py index c16bbbcfe8..52ecdbfd58 100644 --- a/sentry_sdk/integrations/_asgi_common.py +++ b/sentry_sdk/integrations/_asgi_common.py @@ -21,7 +21,7 @@ def _get_headers(asgi_scope): Extract headers from the ASGI scope, in the format that the Sentry protocol expects. """ headers = {} # type: Dict[str, str] - for raw_key, raw_value in asgi_scope["headers"]: + for raw_key, raw_value in asgi_scope.get("headers", {}): key = raw_key.decode("latin-1") value = raw_value.decode("latin-1") if key in headers: @@ -32,8 +32,8 @@ def _get_headers(asgi_scope): return headers -def _get_url(asgi_scope, default_scheme, host): - # type: (Dict[str, Any], Literal["ws", "http"], Optional[Union[AnnotatedValue, str]]) -> str +def _get_url(asgi_scope, default_scheme=None, host=None): + # type: (Dict[str, Any], Optional[Literal["ws", "http"]], Optional[Union[AnnotatedValue, str]]) -> str """ Extract URL from the ASGI scope, without also including the querystring. 
""" diff --git a/sentry_sdk/integrations/_wsgi_common.py b/sentry_sdk/integrations/_wsgi_common.py index 48bc432887..2d4a5f7b73 100644 --- a/sentry_sdk/integrations/_wsgi_common.py +++ b/sentry_sdk/integrations/_wsgi_common.py @@ -1,10 +1,9 @@ -from contextlib import contextmanager import json from copy import deepcopy import sentry_sdk from sentry_sdk.scope import should_send_default_pii -from sentry_sdk.utils import AnnotatedValue, logger +from sentry_sdk.utils import AnnotatedValue, SENSITIVE_DATA_SUBSTITUTE try: from django.http.request import RawPostDataException @@ -16,12 +15,11 @@ if TYPE_CHECKING: from typing import Any from typing import Dict - from typing import Iterator from typing import Mapping from typing import MutableMapping from typing import Optional from typing import Union - from sentry_sdk._types import Event, HttpStatusCodeRange + from sentry_sdk._types import Event SENSITIVE_ENV_KEYS = ( @@ -52,13 +50,6 @@ ) -# This noop context manager can be replaced with "from contextlib import nullcontext" when we drop Python 3.6 support -@contextmanager -def nullcontext(): - # type: () -> Iterator[None] - yield - - def request_body_within_bounds(client, content_length): # type: (Optional[sentry_sdk.client.BaseClient], int) -> bool if client is None: @@ -237,35 +228,15 @@ def _filter_headers(headers): } -def _in_http_status_code_range(code, code_ranges): - # type: (object, list[HttpStatusCodeRange]) -> bool - for target in code_ranges: - if isinstance(target, int): - if code == target: - return True - continue - - try: - if code in target: - return True - except TypeError: - logger.warning( - "failed_request_status_codes has to be a list of integers or containers" - ) - - return False - +def _request_headers_to_span_attributes(headers): + # type: (dict[str, str]) -> dict[str, str] + attributes = {} -class HttpCodeRangeContainer: - """ - Wrapper to make it possible to use list[HttpStatusCodeRange] as a Container[int]. - Used for backwards compatibility with the old `failed_request_status_codes` option. 
- """ + headers = _filter_headers(headers) - def __init__(self, code_ranges): - # type: (list[HttpStatusCodeRange]) -> None - self._code_ranges = code_ranges + for header, value in headers.items(): + if isinstance(value, AnnotatedValue): + value = SENSITIVE_DATA_SUBSTITUTE + attributes[f"http.request.header.{header.lower()}"] = value - def __contains__(self, item): - # type: (object) -> bool - return _in_http_status_code_range(item, self._code_ranges) + return attributes diff --git a/sentry_sdk/integrations/aiohttp.py b/sentry_sdk/integrations/aiohttp.py index ad3202bf2c..bcdd964b8d 100644 --- a/sentry_sdk/integrations/aiohttp.py +++ b/sentry_sdk/integrations/aiohttp.py @@ -3,8 +3,14 @@ from functools import wraps import sentry_sdk -from sentry_sdk.api import continue_trace -from sentry_sdk.consts import OP, SPANSTATUS, SPANDATA +from sentry_sdk.consts import ( + OP, + SPANSTATUS, + SPANDATA, + BAGGAGE_HEADER_NAME, + SOURCE_FOR_STYLE, + TransactionSource, +) from sentry_sdk.integrations import ( _DEFAULT_FAILED_REQUEST_STATUS_CODES, _check_minimum_version, @@ -15,18 +21,15 @@ from sentry_sdk.sessions import track_session from sentry_sdk.integrations._wsgi_common import ( _filter_headers, + _request_headers_to_span_attributes, request_body_within_bounds, ) -from sentry_sdk.tracing import ( - BAGGAGE_HEADER_NAME, - SOURCE_FOR_STYLE, - TransactionSource, -) from sentry_sdk.tracing_utils import should_propagate_trace from sentry_sdk.utils import ( capture_internal_exceptions, ensure_integration_enabled, event_from_exception, + http_client_status_to_breadcrumb_level, logger, parse_url, parse_version, @@ -67,6 +70,13 @@ TRANSACTION_STYLE_VALUES = ("handler_name", "method_and_path_pattern") +REQUEST_PROPERTY_TO_ATTRIBUTE = { + "query_string": "url.query", + "method": "http.request.method", + "scheme": "url.scheme", + "path": "url.path", +} + class AioHttpIntegration(Integration): identifier = "aiohttp" @@ -123,51 +133,38 @@ async def sentry_app_handle(self, request, *args, **kwargs): scope.add_event_processor(_make_request_processor(weak_request)) headers = dict(request.headers) - transaction = continue_trace( - headers, - op=OP.HTTP_SERVER, - # If this transaction name makes it to the UI, AIOHTTP's - # URL resolver did not find a route or died trying. - name="generic AIOHTTP request", - source=TransactionSource.ROUTE, - origin=AioHttpIntegration.origin, - ) - with sentry_sdk.start_transaction( - transaction, - custom_sampling_context={"aiohttp_request": request}, - ): - try: - response = await old_handle(self, request) - except HTTPException as e: - transaction.set_http_status(e.status_code) - - if ( - e.status_code - in integration._failed_request_status_codes - ): - _capture_exception() - - raise - except (asyncio.CancelledError, ConnectionResetError): - transaction.set_status(SPANSTATUS.CANCELLED) - raise - except Exception: - # This will probably map to a 500 but seems like we - # have no way to tell. Do not set span status. - reraise(*_capture_exception()) - - try: - # A valid response handler will return a valid response with a status. But, if the handler - # returns an invalid response (e.g. None), the line below will raise an AttributeError. - # Even though this is likely invalid, we need to handle this case to ensure we don't break - # the application. 
- response_status = response.status - except AttributeError: - pass - else: - transaction.set_http_status(response_status) - - return response + with sentry_sdk.continue_trace(headers): + with sentry_sdk.start_span( + op=OP.HTTP_SERVER, + # If this transaction name makes it to the UI, AIOHTTP's + # URL resolver did not find a route or died trying. + name="generic AIOHTTP request", + source=TransactionSource.ROUTE, + origin=AioHttpIntegration.origin, + attributes=_prepopulate_attributes(request), + ) as span: + try: + response = await old_handle(self, request) + except HTTPException as e: + span.set_http_status(e.status_code) + + if ( + e.status_code + in integration._failed_request_status_codes + ): + _capture_exception() + + raise + except (asyncio.CancelledError, ConnectionResetError): + span.set_status(SPANSTATUS.CANCELLED) + raise + except Exception: + # This will probably map to a 500 but seems like we + # have no way to tell. Do not set span status. + reraise(*_capture_exception()) + + # An invalid response (e.g. None) has no status; guard the + # attribute access so we do not break the application. + try: + response_status = response.status + except AttributeError: + pass + else: + span.set_http_status(response_status) + + return response Application._handle = sentry_app_handle @@ -238,12 +235,19 @@ async def on_request_start(session, trace_config_ctx, params): name="%s %s" % (method, parsed_url.url if parsed_url else SENSITIVE_DATA_SUBSTITUTE), origin=AioHttpIntegration.origin, + only_if_parent=True, ) - span.set_data(SPANDATA.HTTP_METHOD, method) + + data = { + SPANDATA.HTTP_METHOD: method, + } if parsed_url is not None: - span.set_data("url", parsed_url.url) - span.set_data(SPANDATA.HTTP_QUERY, parsed_url.query) - span.set_data(SPANDATA.HTTP_FRAGMENT, parsed_url.fragment) + data["url"] = parsed_url.url + data[SPANDATA.HTTP_QUERY] = parsed_url.query + data[SPANDATA.HTTP_FRAGMENT] = parsed_url.fragment + + for key, value in data.items(): + span.set_attribute(key, value) client = sentry_sdk.get_client() @@ -268,15 +272,28 @@ async def on_request_start(session, trace_config_ctx, params): params.headers[key] = value trace_config_ctx.span = span + trace_config_ctx.span_data = data async def on_request_end(session, trace_config_ctx, params): # type: (ClientSession, SimpleNamespace, TraceRequestEndParams) -> None if trace_config_ctx.span is None: return + span_data = trace_config_ctx.span_data or {} + status_code = int(params.response.status) + span_data[SPANDATA.HTTP_STATUS_CODE] = status_code + span_data["reason"] = params.response.reason + + sentry_sdk.add_breadcrumb( + type="http", + category="httplib", + data=span_data, + level=http_client_status_to_breadcrumb_level(status_code), + ) + span = trace_config_ctx.span span.set_http_status(int(params.response.status)) - span.set_data("reason", params.response.reason) + span.set_attribute("reason", params.response.reason) span.finish() trace_config = TraceConfig() @@ -355,3 +372,30 @@ def get_aiohttp_request_data(request): # request has no body return None + + +def _prepopulate_attributes(request): + # type: (Request) -> dict[str, Any] + """Construct initial span attributes that can be used in traces sampler.""" + attributes = {} + + for prop, attr in REQUEST_PROPERTY_TO_ATTRIBUTE.items(): + if getattr(request, prop, None) is not None: + attributes[attr] = getattr(request, prop) + + if getattr(request, "host", None) is not None: + try: + host, port = request.host.split(":") + attributes["server.address"] = host + attributes["server.port"] = port + except ValueError: + attributes["server.address"] = request.host + + with capture_internal_exceptions(): + url = f"{request.scheme}://{request.host}{request.path}" # noqa: E231 + if 
request.query_string: + attributes["url.full"] = f"{url}?{request.query_string}" + + attributes.update(_request_headers_to_span_attributes(dict(request.headers))) + + return attributes diff --git a/sentry_sdk/integrations/anthropic.py b/sentry_sdk/integrations/anthropic.py index 76a3bb9f13..454b6f93ca 100644 --- a/sentry_sdk/integrations/anthropic.py +++ b/sentry_sdk/integrations/anthropic.py @@ -121,13 +121,13 @@ def _add_ai_data_to_span( with capture_internal_exceptions(): if should_send_default_pii() and integration.include_prompts: complete_message = "".join(content_blocks) - span.set_data( + span.set_attribute( SPANDATA.AI_RESPONSES, [{"type": "text", "text": complete_message}], ) total_tokens = input_tokens + output_tokens record_token_usage(span, input_tokens, output_tokens, total_tokens) - span.set_data(SPANDATA.AI_STREAMING, True) + span.set_attribute(SPANDATA.AI_STREAMING, True) def _sentry_patched_create_common(f, *args, **kwargs): @@ -148,6 +148,7 @@ def _sentry_patched_create_common(f, *args, **kwargs): op=OP.ANTHROPIC_MESSAGES_CREATE, description="Anthropic messages create", origin=AnthropicIntegration.origin, + only_if_parent=True, ) span.__enter__() @@ -158,15 +159,17 @@ def _sentry_patched_create_common(f, *args, **kwargs): model = kwargs.get("model") with capture_internal_exceptions(): - span.set_data(SPANDATA.AI_MODEL_ID, model) - span.set_data(SPANDATA.AI_STREAMING, False) + span.set_attribute(SPANDATA.AI_MODEL_ID, model) + span.set_attribute(SPANDATA.AI_STREAMING, False) if should_send_default_pii() and integration.include_prompts: - span.set_data(SPANDATA.AI_INPUT_MESSAGES, messages) + span.set_attribute(SPANDATA.AI_INPUT_MESSAGES, messages) if hasattr(result, "content"): if should_send_default_pii() and integration.include_prompts: - span.set_data(SPANDATA.AI_RESPONSES, _get_responses(result.content)) + span.set_attribute( + SPANDATA.AI_RESPONSES, _get_responses(result.content) + ) _calculate_token_usage(result, span) span.__exit__(None, None, None) @@ -214,7 +217,7 @@ async def new_iterator_async(): result._iterator = new_iterator() else: - span.set_data("unknown_response", True) + span.set_attribute("unknown_response", True) span.__exit__(None, None, None) return result diff --git a/sentry_sdk/integrations/arq.py b/sentry_sdk/integrations/arq.py index 1ea8e32fb3..e9dd9d92ac 100644 --- a/sentry_sdk/integrations/arq.py +++ b/sentry_sdk/integrations/arq.py @@ -5,7 +5,7 @@ from sentry_sdk.integrations import _check_minimum_version, DidNotEnable, Integration from sentry_sdk.integrations.logging import ignore_logger from sentry_sdk.scope import should_send_default_pii -from sentry_sdk.tracing import Transaction, TransactionSource +from sentry_sdk.tracing import TransactionSource from sentry_sdk.utils import ( capture_internal_exceptions, ensure_integration_enabled, @@ -37,6 +37,8 @@ ARQ_CONTROL_FLOW_EXCEPTIONS = (JobExecutionFailed, Retry, RetryJob) +DEFAULT_TRANSACTION_NAME = "unknown arq task" + class ArqIntegration(Integration): identifier = "arq" @@ -76,7 +78,10 @@ async def _sentry_enqueue_job(self, function, *args, **kwargs): return await old_enqueue_job(self, function, *args, **kwargs) with sentry_sdk.start_span( - op=OP.QUEUE_SUBMIT_ARQ, name=function, origin=ArqIntegration.origin + op=OP.QUEUE_SUBMIT_ARQ, + name=function, + origin=ArqIntegration.origin, + only_if_parent=True, ): return await old_enqueue_job(self, function, *args, **kwargs) @@ -96,18 +101,24 @@ async def _sentry_run_job(self, job_id, score): with sentry_sdk.isolation_scope() as scope: scope._name 
= "arq" + scope.set_transaction_name( + DEFAULT_TRANSACTION_NAME, + source=TransactionSource.TASK, + ) scope.clear_breadcrumbs() - transaction = Transaction( - name="unknown arq task", - status="ok", + with sentry_sdk.start_span( op=OP.QUEUE_TASK_ARQ, + name=DEFAULT_TRANSACTION_NAME, source=TransactionSource.TASK, origin=ArqIntegration.origin, - ) + ) as span: + return_value = await old_run_job(self, job_id, score) + + if span.status is None: + span.set_status(SPANSTATUS.OK) - with sentry_sdk.start_transaction(transaction): - return await old_run_job(self, job_id, score) + return return_value Worker.run_job = _sentry_run_job @@ -116,12 +127,12 @@ def _capture_exception(exc_info): # type: (ExcInfo) -> None scope = sentry_sdk.get_current_scope() - if scope.transaction is not None: + if scope.root_span is not None: if exc_info[0] in ARQ_CONTROL_FLOW_EXCEPTIONS: - scope.transaction.set_status(SPANSTATUS.ABORTED) + scope.root_span.set_status(SPANSTATUS.ABORTED) return - scope.transaction.set_status(SPANSTATUS.INTERNAL_ERROR) + scope.root_span.set_status(SPANSTATUS.INTERNAL_ERROR) event, hint = event_from_exception( exc_info, @@ -138,8 +149,8 @@ def event_processor(event, hint): with capture_internal_exceptions(): scope = sentry_sdk.get_current_scope() - if scope.transaction is not None: - scope.transaction.name = ctx["job_name"] + if scope.root_span is not None: + scope.root_span.name = ctx["job_name"] event["transaction"] = ctx["job_name"] tags = event.setdefault("tags", {}) diff --git a/sentry_sdk/integrations/asgi.py b/sentry_sdk/integrations/asgi.py index fc8ee29b1a..a8a5e46c8b 100644 --- a/sentry_sdk/integrations/asgi.py +++ b/sentry_sdk/integrations/asgi.py @@ -10,25 +10,22 @@ from functools import partial import sentry_sdk -from sentry_sdk.api import continue_trace -from sentry_sdk.consts import OP +from sentry_sdk.consts import OP, SOURCE_FOR_STYLE, TransactionSource from sentry_sdk.integrations._asgi_common import ( _get_headers, + _get_query, _get_request_data, _get_url, ) from sentry_sdk.integrations._wsgi_common import ( DEFAULT_HTTP_METHODS_TO_CAPTURE, - nullcontext, + _request_headers_to_span_attributes, ) from sentry_sdk.sessions import track_session -from sentry_sdk.tracing import ( - SOURCE_FOR_STYLE, - TransactionSource, -) from sentry_sdk.utils import ( ContextVar, + capture_internal_exceptions, event_from_exception, HAS_REAL_CONTEXTVARS, CONTEXTVARS_ERROR_MESSAGE, @@ -36,7 +33,6 @@ transaction_from_function, _get_installed_modules, ) -from sentry_sdk.tracing import Transaction from typing import TYPE_CHECKING @@ -56,6 +52,14 @@ TRANSACTION_STYLE_VALUES = ("endpoint", "url") +ASGI_SCOPE_PROPERTY_TO_ATTRIBUTE = { + "http_version": "network.protocol.version", + "method": "http.request.method", + "path": "url.path", + "scheme": "url.scheme", + "type": "network.protocol.name", +} + def _capture_exception(exc, mechanism_type="asgi"): # type: (Any, str) -> None @@ -100,7 +104,7 @@ def __init__( unsafe_context_data=False, # type: bool transaction_style="endpoint", # type: str mechanism_type="asgi", # type: str - span_origin="manual", # type: str + span_origin=None, # type: Optional[str] http_methods_to_capture=DEFAULT_HTTP_METHODS_TO_CAPTURE, # type: Tuple[str, ...] ): # type: (...) 
-> None @@ -157,24 +161,40 @@ async def _run_asgi3(self, scope, receive, send): # type: (Any, Any, Any) -> Any return await self._run_app(scope, receive, send, asgi_version=3) + async def _run_original_app(self, scope, receive, send, asgi_version): + # type: (Any, Any, Any, Any, int) -> Any + try: + if asgi_version == 2: + return await self.app(scope)(receive, send) + else: + return await self.app(scope, receive, send) + + except Exception as exc: + _capture_exception(exc, mechanism_type=self.mechanism_type) + raise exc from None + async def _run_app(self, scope, receive, send, asgi_version): # type: (Any, Any, Any, Any, int) -> Any is_recursive_asgi_middleware = _asgi_middleware_applied.get(False) is_lifespan = scope["type"] == "lifespan" if is_recursive_asgi_middleware or is_lifespan: - try: - if asgi_version == 2: - return await self.app(scope)(receive, send) - else: - return await self.app(scope, receive, send) - - except Exception as exc: - _capture_exception(exc, mechanism_type=self.mechanism_type) - raise exc from None + return await self._run_original_app(scope, receive, send, asgi_version) _asgi_middleware_applied.set(True) try: with sentry_sdk.isolation_scope() as sentry_scope: + ( + transaction_name, + transaction_source, + ) = self._get_transaction_name_and_source( + self.transaction_style, + scope, + ) + sentry_scope.set_transaction_name( + transaction_name, + source=transaction_source, + ) + with track_session(sentry_scope, session_mode="request"): sentry_scope.clear_breadcrumbs() sentry_scope._name = "asgi" @@ -182,82 +202,47 @@ async def _run_app(self, scope, receive, send, asgi_version): sentry_scope.add_event_processor(processor) ty = scope["type"] - ( - transaction_name, - transaction_source, - ) = self._get_transaction_name_and_source( - self.transaction_style, - scope, - ) method = scope.get("method", "").upper() - transaction = None - if ty in ("http", "websocket"): - if ty == "websocket" or method in self.http_methods_to_capture: - transaction = continue_trace( - _get_headers(scope), - op="{}.server".format(ty), - name=transaction_name, - source=transaction_source, - origin=self.span_origin, - ) - logger.debug( - "[ASGI] Created transaction (continuing trace): %s", - transaction, - ) - else: - transaction = Transaction( - op=OP.HTTP_SERVER, + should_trace = ty == "websocket" or ( + ty == "http" and method in self.http_methods_to_capture + ) + if not should_trace: + return await self._run_original_app( + scope, receive, send, asgi_version + ) + + with sentry_sdk.continue_trace(_get_headers(scope)): + with sentry_sdk.start_span( + op=( + OP.WEBSOCKET_SERVER + if ty == "websocket" + else OP.HTTP_SERVER + ), name=transaction_name, source=transaction_source, origin=self.span_origin, - ) - logger.debug( - "[ASGI] Created transaction (new): %s", transaction - ) - - if transaction: - transaction.set_tag("asgi.type", ty) - logger.debug( - "[ASGI] Set transaction name and source on transaction: '%s' / '%s'", - transaction.name, - transaction.source, - ) - - with ( - sentry_sdk.start_transaction( - transaction, - custom_sampling_context={"asgi_scope": scope}, - ) - if transaction is not None - else nullcontext() - ): - logger.debug("[ASGI] Started transaction: %s", transaction) - try: + attributes=_prepopulate_attributes(scope), + ) as span: + if span is not None: + logger.debug("[ASGI] Started transaction: %s", span) + span.set_tag("asgi.type", ty) async def _sentry_wrapped_send(event): # type: (Dict[str, Any]) -> Any - if transaction is not None: - is_http_response = ( - 
event.get("type") == "http.response.start" - and "status" in event - ) - if is_http_response: - transaction.set_http_status(event["status"]) + is_http_response = ( + event.get("type") == "http.response.start" + and span is not None + and "status" in event + ) + if is_http_response: + span.set_http_status(event["status"]) return await send(event) - if asgi_version == 2: - return await self.app(scope)( - receive, _sentry_wrapped_send - ) - else: - return await self.app( - scope, receive, _sentry_wrapped_send - ) - except Exception as exc: - _capture_exception(exc, mechanism_type=self.mechanism_type) - raise exc from None + return await self._run_original_app( + scope, receive, _sentry_wrapped_send, asgi_version + ) finally: _asgi_middleware_applied.set(False) @@ -336,3 +321,37 @@ def _get_transaction_name_and_source(self, transaction_style, asgi_scope): return name, source return name, source + + +def _prepopulate_attributes(scope): + # type: (Any) -> dict[str, Any] + """Unpack ASGI scope into serializable OTel attributes.""" + scope = scope or {} + + attributes = {} + for attr, key in ASGI_SCOPE_PROPERTY_TO_ATTRIBUTE.items(): + if scope.get(attr): + attributes[key] = scope[attr] + + for attr in ("client", "server"): + if scope.get(attr): + try: + host, port = scope[attr] + attributes[f"{attr}.address"] = host + if port is not None: + attributes[f"{attr}.port"] = port + except Exception: + pass + + with capture_internal_exceptions(): + full_url = _get_url(scope) + query = _get_query(scope) + if query: + attributes["url.query"] = query + full_url = f"{full_url}?{query}" + + attributes["url.full"] = full_url + + attributes.update(_request_headers_to_span_attributes(_get_headers(scope))) + + return attributes diff --git a/sentry_sdk/integrations/asyncio.py b/sentry_sdk/integrations/asyncio.py index ae580ca038..d287ce6118 100644 --- a/sentry_sdk/integrations/asyncio.py +++ b/sentry_sdk/integrations/asyncio.py @@ -48,6 +48,7 @@ async def _task_with_sentry_span_creation(): op=OP.FUNCTION, name=get_name(coro), origin=AsyncioIntegration.origin, + only_if_parent=True, ): try: result = await coro diff --git a/sentry_sdk/integrations/asyncpg.py b/sentry_sdk/integrations/asyncpg.py index b6b53f4668..65f4d30e0d 100644 --- a/sentry_sdk/integrations/asyncpg.py +++ b/sentry_sdk/integrations/asyncpg.py @@ -1,6 +1,6 @@ from __future__ import annotations import contextlib -from typing import Any, TypeVar, Callable, Awaitable, Iterator +from typing import Any, TypeVar, Callable, Awaitable, Iterator, Optional import sentry_sdk from sentry_sdk.consts import OP, SPANDATA @@ -8,6 +8,7 @@ from sentry_sdk.tracing import Span from sentry_sdk.tracing_utils import add_query_source, record_sql_queries from sentry_sdk.utils import ( + _serialize_span_attribute, ensure_integration_enabled, parse_version, capture_internal_exceptions, @@ -38,7 +39,6 @@ def setup_once() -> None: asyncpg.Connection.execute = _wrap_execute( asyncpg.Connection.execute, ) - asyncpg.Connection._execute = _wrap_connection_method( asyncpg.Connection._execute ) @@ -78,8 +78,8 @@ async def _inner(*args: Any, **kwargs: Any) -> T: ) as span: res = await f(*args, **kwargs) - with capture_internal_exceptions(): - add_query_source(span) + with capture_internal_exceptions(): + add_query_source(span) return res @@ -121,10 +121,13 @@ def _wrap_connection_method( async def _inner(*args: Any, **kwargs: Any) -> T: if sentry_sdk.get_client().get_integration(AsyncPGIntegration) is None: return await f(*args, **kwargs) + query = args[1] params_list = args[2] if 
len(args) > 2 else None + with _record(None, query, params_list, executemany=executemany) as span: - _set_db_data(span, args[0]) + data = _get_db_data(conn=args[0]) + _set_on_span(span, data) res = await f(*args, **kwargs) return res @@ -144,9 +147,10 @@ def _inner(*args: Any, **kwargs: Any) -> T: # noqa: N807 params_list, executemany=False, ) as span: - _set_db_data(span, args[0]) + data = _get_db_data(conn=args[0]) + _set_on_span(span, data) res = f(*args, **kwargs) - span.set_data("db.cursor", res) + span.set_attribute("db.cursor", _serialize_span_attribute(res)) return res @@ -158,29 +162,24 @@ async def _inner(*args: Any, **kwargs: Any) -> T: if sentry_sdk.get_client().get_integration(AsyncPGIntegration) is None: return await f(*args, **kwargs) - user = kwargs["params"].user - database = kwargs["params"].database - with sentry_sdk.start_span( op=OP.DB, name="connect", origin=AsyncPGIntegration.origin, + only_if_parent=True, ) as span: - span.set_data(SPANDATA.DB_SYSTEM, "postgresql") - addr = kwargs.get("addr") - if addr: - try: - span.set_data(SPANDATA.SERVER_ADDRESS, addr[0]) - span.set_data(SPANDATA.SERVER_PORT, addr[1]) - except IndexError: - pass - span.set_data(SPANDATA.DB_NAME, database) - span.set_data(SPANDATA.DB_USER, user) + data = _get_db_data( + addr=kwargs.get("addr"), + database=kwargs["params"].database, + user=kwargs["params"].user, + ) + _set_on_span(span, data) with capture_internal_exceptions(): sentry_sdk.add_breadcrumb( - message="connect", category="query", data=span._data + message="connect", category="query", data=data ) + res = await f(*args, **kwargs) return res @@ -188,21 +187,37 @@ async def _inner(*args: Any, **kwargs: Any) -> T: return _inner -def _set_db_data(span: Span, conn: Any) -> None: - span.set_data(SPANDATA.DB_SYSTEM, "postgresql") +def _get_db_data( + conn: Any = None, + addr: Optional[tuple[str, ...]] = None, + database: Optional[str] = None, + user: Optional[str] = None, +) -> dict[str, str]: + if conn is not None: + addr = conn._addr + database = conn._params.database + user = conn._params.user + + data = { + SPANDATA.DB_SYSTEM: "postgresql", + } - addr = conn._addr if addr: try: - span.set_data(SPANDATA.SERVER_ADDRESS, addr[0]) - span.set_data(SPANDATA.SERVER_PORT, addr[1]) + data[SPANDATA.SERVER_ADDRESS] = addr[0] + data[SPANDATA.SERVER_PORT] = addr[1] except IndexError: pass - database = conn._params.database if database: - span.set_data(SPANDATA.DB_NAME, database) + data[SPANDATA.DB_NAME] = database - user = conn._params.user if user: - span.set_data(SPANDATA.DB_USER, user) + data[SPANDATA.DB_USER] = user + + return data + + +def _set_on_span(span: Span, data: dict[str, Any]) -> None: + for key, value in data.items(): + span.set_attribute(key, value) diff --git a/sentry_sdk/integrations/aws_lambda.py b/sentry_sdk/integrations/aws_lambda.py index 4990fd6e6a..66d14b22a3 100644 --- a/sentry_sdk/integrations/aws_lambda.py +++ b/sentry_sdk/integrations/aws_lambda.py @@ -5,9 +5,9 @@ from copy import deepcopy from datetime import datetime, timedelta, timezone from os import environ +from urllib.parse import urlencode import sentry_sdk -from sentry_sdk.api import continue_trace from sentry_sdk.consts import OP from sentry_sdk.scope import should_send_default_pii from sentry_sdk.tracing import TransactionSource @@ -21,7 +21,10 @@ reraise, ) from sentry_sdk.integrations import Integration -from sentry_sdk.integrations._wsgi_common import _filter_headers +from sentry_sdk.integrations._wsgi_common import ( + _filter_headers, + 
_request_headers_to_span_attributes, +) from typing import TYPE_CHECKING @@ -40,6 +43,17 @@ MILLIS_TO_SECONDS = 1000.0 +EVENT_TO_ATTRIBUTES = { + "httpMethod": "http.request.method", + "queryStringParameters": "url.query", + "path": "url.path", +} + +CONTEXT_TO_ATTRIBUTES = { + "function_name": "faas.name", +} + + def _wrap_init_error(init_error): # type: (F) -> F @ensure_integration_enabled(AwsLambdaIntegration, init_error) @@ -110,6 +124,9 @@ def sentry_handler(aws_event, aws_context, *args, **kwargs): configured_time = aws_context.get_remaining_time_in_millis() with sentry_sdk.isolation_scope() as scope: + scope.set_transaction_name( + aws_context.function_name, source=TransactionSource.COMPONENT + ) timeout_thread = None with capture_internal_exceptions(): scope.clear_breadcrumbs() @@ -149,34 +166,28 @@ def sentry_handler(aws_event, aws_context, *args, **kwargs): if not isinstance(headers, dict): headers = {} - transaction = continue_trace( - headers, - op=OP.FUNCTION_AWS, - name=aws_context.function_name, - source=TransactionSource.COMPONENT, - origin=AwsLambdaIntegration.origin, - ) - with sentry_sdk.start_transaction( - transaction, - custom_sampling_context={ - "aws_event": aws_event, - "aws_context": aws_context, - }, - ): - try: - return handler(aws_event, aws_context, *args, **kwargs) - except Exception: - exc_info = sys.exc_info() - sentry_event, hint = event_from_exception( - exc_info, - client_options=client.options, - mechanism={"type": "aws_lambda", "handled": False}, - ) - sentry_sdk.capture_event(sentry_event, hint=hint) - reraise(*exc_info) - finally: - if timeout_thread: - timeout_thread.stop() + with sentry_sdk.continue_trace(headers): + with sentry_sdk.start_span( + op=OP.FUNCTION_AWS, + name=aws_context.function_name, + source=TransactionSource.COMPONENT, + origin=AwsLambdaIntegration.origin, + attributes=_prepopulate_attributes(request_data, aws_context), + ): + try: + return handler(aws_event, aws_context, *args, **kwargs) + except Exception: + exc_info = sys.exc_info() + sentry_event, hint = event_from_exception( + exc_info, + client_options=client.options, + mechanism={"type": "aws_lambda", "handled": False}, + ) + sentry_sdk.capture_event(sentry_event, hint=hint) + reraise(*exc_info) + finally: + if timeout_thread: + timeout_thread.stop() return sentry_handler # type: ignore @@ -219,77 +230,44 @@ def setup_once(): ) return - pre_37 = hasattr(lambda_bootstrap, "handle_http_request") # Python 3.6 - - if pre_37: - old_handle_event_request = lambda_bootstrap.handle_event_request - - def sentry_handle_event_request(request_handler, *args, **kwargs): - # type: (Any, *Any, **Any) -> Any - request_handler = _wrap_handler(request_handler) - return old_handle_event_request(request_handler, *args, **kwargs) + lambda_bootstrap.LambdaRuntimeClient.post_init_error = _wrap_init_error( + lambda_bootstrap.LambdaRuntimeClient.post_init_error + ) - lambda_bootstrap.handle_event_request = sentry_handle_event_request + old_handle_event_request = lambda_bootstrap.handle_event_request - old_handle_http_request = lambda_bootstrap.handle_http_request - - def sentry_handle_http_request(request_handler, *args, **kwargs): - # type: (Any, *Any, **Any) -> Any - request_handler = _wrap_handler(request_handler) - return old_handle_http_request(request_handler, *args, **kwargs) - - lambda_bootstrap.handle_http_request = sentry_handle_http_request + def sentry_handle_event_request( # type: ignore + lambda_runtime_client, request_handler, *args, **kwargs + ): + request_handler = 
_wrap_handler(request_handler) + return old_handle_event_request( + lambda_runtime_client, request_handler, *args, **kwargs + ) - # Patch to_json to drain the queue. This should work even when the - # SDK is initialized inside of the handler + lambda_bootstrap.handle_event_request = sentry_handle_event_request - old_to_json = lambda_bootstrap.to_json + # Patch the runtime client to drain the queue. This should work + # even when the SDK is initialized inside of the handler - def sentry_to_json(*args, **kwargs): + def _wrap_post_function(f): + # type: (F) -> F + def inner(*args, **kwargs): # type: (*Any, **Any) -> Any _drain_queue() - return old_to_json(*args, **kwargs) + return f(*args, **kwargs) - lambda_bootstrap.to_json = sentry_to_json - else: - lambda_bootstrap.LambdaRuntimeClient.post_init_error = _wrap_init_error( - lambda_bootstrap.LambdaRuntimeClient.post_init_error - ) + return inner # type: ignore - old_handle_event_request = lambda_bootstrap.handle_event_request - - def sentry_handle_event_request( # type: ignore - lambda_runtime_client, request_handler, *args, **kwargs - ): - request_handler = _wrap_handler(request_handler) - return old_handle_event_request( - lambda_runtime_client, request_handler, *args, **kwargs - ) - - lambda_bootstrap.handle_event_request = sentry_handle_event_request - - # Patch the runtime client to drain the queue. This should work - # even when the SDK is initialized inside of the handler - - def _wrap_post_function(f): - # type: (F) -> F - def inner(*args, **kwargs): - # type: (*Any, **Any) -> Any - _drain_queue() - return f(*args, **kwargs) - - return inner # type: ignore - - lambda_bootstrap.LambdaRuntimeClient.post_invocation_result = ( - _wrap_post_function( - lambda_bootstrap.LambdaRuntimeClient.post_invocation_result - ) + lambda_bootstrap.LambdaRuntimeClient.post_invocation_result = ( + _wrap_post_function( + lambda_bootstrap.LambdaRuntimeClient.post_invocation_result ) - lambda_bootstrap.LambdaRuntimeClient.post_invocation_error = ( - _wrap_post_function( - lambda_bootstrap.LambdaRuntimeClient.post_invocation_error - ) + ) + lambda_bootstrap.LambdaRuntimeClient.post_invocation_error = ( + _wrap_post_function( + lambda_bootstrap.LambdaRuntimeClient.post_invocation_error ) + ) def get_lambda_bootstrap(): @@ -362,7 +340,7 @@ def event_processor(sentry_event, hint, start_time=start_time): request["url"] = _get_url(aws_event, aws_context) if "queryStringParameters" in aws_event: - request["query_string"] = aws_event["queryStringParameters"] + request["query_string"] = urlencode(aws_event["queryStringParameters"]) if "headers" in aws_event: request["headers"] = _filter_headers(aws_event["headers"]) @@ -402,7 +380,9 @@ def _get_url(aws_event, aws_context): path = aws_event.get("path", None) headers = aws_event.get("headers") - if headers is None: + # Some AWS Services (ie. 
EventBridge) set headers as a list + # or None, so we must ensure it is a dict + if not isinstance(headers, dict): headers = {} host = headers.get("Host", None) @@ -497,3 +477,40 @@ def _event_from_error_json(error_json): } # type: Event return event + + +def _prepopulate_attributes(aws_event, aws_context): + # type: (Any, Any) -> dict[str, Any] + attributes = { + "cloud.provider": "aws", + } + + for prop, attr in EVENT_TO_ATTRIBUTES.items(): + if aws_event.get(prop) is not None: + if prop == "queryStringParameters": + attributes[attr] = urlencode(aws_event[prop]) + else: + attributes[attr] = aws_event[prop] + + for prop, attr in CONTEXT_TO_ATTRIBUTES.items(): + if getattr(aws_context, prop, None) is not None: + attributes[attr] = getattr(aws_context, prop) + + url = _get_url(aws_event, aws_context) + if url: + if aws_event.get("queryStringParameters"): + url += f"?{urlencode(aws_event['queryStringParameters'])}" + attributes["url.full"] = url + + headers = {} + if aws_event.get("headers") and isinstance(aws_event["headers"], dict): + headers = aws_event["headers"] + + if headers.get("X-Forwarded-Proto"): + attributes["network.protocol.name"] = headers["X-Forwarded-Proto"] + if headers.get("Host"): + attributes["server.address"] = headers["Host"] + + attributes.update(_request_headers_to_span_attributes(headers)) + + return attributes diff --git a/sentry_sdk/integrations/boto3.py b/sentry_sdk/integrations/boto3.py index 0207341f1b..65239b7548 100644 --- a/sentry_sdk/integrations/boto3.py +++ b/sentry_sdk/integrations/boto3.py @@ -3,7 +3,6 @@ import sentry_sdk from sentry_sdk.consts import OP, SPANDATA from sentry_sdk.integrations import _check_minimum_version, Integration, DidNotEnable -from sentry_sdk.tracing import Span from sentry_sdk.utils import ( capture_internal_exceptions, ensure_integration_enabled, @@ -19,6 +18,8 @@ from typing import Optional from typing import Type + from sentry_sdk.tracing import Span + try: from botocore import __version__ as BOTOCORE_VERSION # type: ignore from botocore.client import BaseClient # type: ignore @@ -63,17 +64,23 @@ def _sentry_request_created(service_id, request, operation_name, **kwargs): op=OP.HTTP_CLIENT, name=description, origin=Boto3Integration.origin, + only_if_parent=True, ) + data = { + SPANDATA.HTTP_METHOD: request.method, + } with capture_internal_exceptions(): parsed_url = parse_url(request.url, sanitize=False) - span.set_data("aws.request.url", parsed_url.url) - span.set_data(SPANDATA.HTTP_QUERY, parsed_url.query) - span.set_data(SPANDATA.HTTP_FRAGMENT, parsed_url.fragment) + data["aws.request.url"] = parsed_url.url + data[SPANDATA.HTTP_QUERY] = parsed_url.query + data[SPANDATA.HTTP_FRAGMENT] = parsed_url.fragment + + for key, value in data.items(): + span.set_attribute(key, value) span.set_tag("aws.service_id", service_id) span.set_tag("aws.operation_name", operation_name) - span.set_data(SPANDATA.HTTP_METHOD, request.method) # We do it in order for subsequent http calls/retries to be # attached to this span. @@ -82,6 +89,7 @@ def _sentry_request_created(service_id, request, operation_name, **kwargs): # request.context is an open-ended data-structure # where we can add anything useful in request life cycle. request.context["_sentrysdk_span"] = span + request.context["_sentrysdk_span_data"] = data def _sentry_after_call(context, parsed, **kwargs): @@ -91,20 +99,28 @@ def _sentry_after_call(context, parsed, **kwargs): # Span could be absent if the integration is disabled. 
if span is None: return - span.__exit__(None, None, None) + + span_data = context.pop("_sentrysdk_span_data", {}) + + sentry_sdk.add_breadcrumb( + type="http", + category="httplib", + data=span_data, + ) body = parsed.get("Body") if not isinstance(body, StreamingBody): + span.__exit__(None, None, None) return - streaming_span = span.start_child( + streaming_span = sentry_sdk.start_span( op=OP.HTTP_CLIENT_STREAM, - name=span.description, + name=span.name, origin=Boto3Integration.origin, + only_if_parent=True, ) orig_read = body.read - orig_close = body.close def sentry_streaming_body_read(*args, **kwargs): # type: (*Any, **Any) -> bytes @@ -119,6 +135,8 @@ def sentry_streaming_body_read(*args, **kwargs): body.read = sentry_streaming_body_read + orig_close = body.close + def sentry_streaming_body_close(*args, **kwargs): # type: (*Any, **Any) -> None streaming_span.finish() @@ -126,6 +144,8 @@ def sentry_streaming_body_close(*args, **kwargs): body.close = sentry_streaming_body_close + span.__exit__(None, None, None) + def _sentry_after_call_error(context, exception, **kwargs): # type: (Dict[str, Any], Type[BaseException], **Any) -> None @@ -134,4 +154,13 @@ def _sentry_after_call_error(context, exception, **kwargs): # Span could be absent if the integration is disabled. if span is None: return + + span_data = context.pop("_sentrysdk_span_data", {}) + + sentry_sdk.add_breadcrumb( + type="http", + category="httplib", + data=span_data, + ) + span.__exit__(type(exception), exception, None) diff --git a/sentry_sdk/integrations/bottle.py b/sentry_sdk/integrations/bottle.py index 8a9fc41208..1fefcf0319 100644 --- a/sentry_sdk/integrations/bottle.py +++ b/sentry_sdk/integrations/bottle.py @@ -1,7 +1,7 @@ import functools import sentry_sdk -from sentry_sdk.tracing import SOURCE_FOR_STYLE +from sentry_sdk.consts import SOURCE_FOR_STYLE from sentry_sdk.utils import ( capture_internal_exceptions, ensure_integration_enabled, diff --git a/sentry_sdk/integrations/celery/__init__.py b/sentry_sdk/integrations/celery/__init__.py index e8811d767e..95a09e6029 100644 --- a/sentry_sdk/integrations/celery/__init__.py +++ b/sentry_sdk/integrations/celery/__init__.py @@ -4,8 +4,7 @@ import sentry_sdk from sentry_sdk import isolation_scope -from sentry_sdk.api import continue_trace -from sentry_sdk.consts import OP, SPANSTATUS, SPANDATA +from sentry_sdk.consts import OP, SPANSTATUS, SPANDATA, BAGGAGE_HEADER_NAME from sentry_sdk.integrations import _check_minimum_version, Integration, DidNotEnable from sentry_sdk.integrations.celery.beat import ( _patch_beat_apply_entry, @@ -14,7 +13,7 @@ ) from sentry_sdk.integrations.celery.utils import _now_seconds_since_epoch from sentry_sdk.integrations.logging import ignore_logger -from sentry_sdk.tracing import BAGGAGE_HEADER_NAME, TransactionSource +from sentry_sdk.tracing import TransactionSource from sentry_sdk.tracing_utils import Baggage from sentry_sdk.utils import ( capture_internal_exceptions, @@ -113,7 +112,6 @@ def _capture_exception(task, exc_info): return if isinstance(exc_info[1], CELERY_CONTROL_FLOW_EXCEPTIONS): - # ??? 
Doesn't map to anything _set_status("aborted") return @@ -277,6 +275,7 @@ def apply_async(*args, **kwargs): op=OP.QUEUE_SUBMIT_CELERY, name=task_name, origin=CeleryIntegration.origin, + only_if_parent=True, ) if not task_started_from_beat else NoOpMgr() @@ -307,40 +306,29 @@ def _inner(*args, **kwargs): with isolation_scope() as scope: scope._name = "celery" scope.clear_breadcrumbs() + scope.set_transaction_name(task.name, source=TransactionSource.TASK) scope.add_event_processor(_make_event_processor(task, *args, **kwargs)) - transaction = None - # Celery task objects are not a thing to be trusted. Even # something such as attribute access can fail. - with capture_internal_exceptions(): - headers = args[3].get("headers") or {} - transaction = continue_trace( - headers, + headers = args[3].get("headers") or {} + + with sentry_sdk.continue_trace(headers): + with sentry_sdk.start_span( op=OP.QUEUE_TASK_CELERY, - name="unknown celery task", + name=task.name, source=TransactionSource.TASK, origin=CeleryIntegration.origin, - ) - transaction.name = task.name - transaction.set_status(SPANSTATUS.OK) + # for some reason, args[1] is a list if non-empty but a + # tuple if empty + attributes=_prepopulate_attributes(task, list(args[1]), args[2]), + ) as root_span: + return_value = f(*args, **kwargs) - if transaction is None: - return f(*args, **kwargs) + if root_span.status is None: + root_span.set_status(SPANSTATUS.OK) - with sentry_sdk.start_transaction( - transaction, - custom_sampling_context={ - "celery_job": { - "task": task.name, - # for some reason, args[1] is a list if non-empty but a - # tuple if empty - "args": list(args[1]), - "kwargs": args[2], - } - }, - ): - return f(*args, **kwargs) + return return_value return _inner # type: ignore @@ -355,7 +343,7 @@ def _set_messaging_destination_name(task, span): if delivery_info.get("exchange") == "" and routing_key is not None: # Empty exchange indicates the default exchange, meaning the tasks # are sent to the queue with the same name as the routing key. 
- span.set_data(SPANDATA.MESSAGING_DESTINATION_NAME, routing_key) + span.set_attribute(SPANDATA.MESSAGING_DESTINATION_NAME, routing_key) def _wrap_task_call(task, f): @@ -377,6 +365,7 @@ def _inner(*args, **kwargs): op=OP.QUEUE_PROCESS, name=task.name, origin=CeleryIntegration.origin, + only_if_parent=True, ) as span: _set_messaging_destination_name(task, span) @@ -391,23 +380,26 @@ def _inner(*args, **kwargs): ) if latency is not None: - span.set_data(SPANDATA.MESSAGING_MESSAGE_RECEIVE_LATENCY, latency) + span.set_attribute( + SPANDATA.MESSAGING_MESSAGE_RECEIVE_LATENCY, latency + ) with capture_internal_exceptions(): - span.set_data(SPANDATA.MESSAGING_MESSAGE_ID, task.request.id) + span.set_attribute(SPANDATA.MESSAGING_MESSAGE_ID, task.request.id) with capture_internal_exceptions(): - span.set_data( + span.set_attribute( SPANDATA.MESSAGING_MESSAGE_RETRY_COUNT, task.request.retries ) with capture_internal_exceptions(): - span.set_data( + span.set_attribute( SPANDATA.MESSAGING_SYSTEM, task.app.connection().transport.driver_type, ) return f(*args, **kwargs) + except Exception: exc_info = sys.exc_info() with capture_internal_exceptions(): @@ -506,23 +498,41 @@ def sentry_publish(self, *args, **kwargs): op=OP.QUEUE_PUBLISH, name=task_name, origin=CeleryIntegration.origin, + only_if_parent=True, ) as span: if task_id is not None: - span.set_data(SPANDATA.MESSAGING_MESSAGE_ID, task_id) + span.set_attribute(SPANDATA.MESSAGING_MESSAGE_ID, task_id) if exchange == "" and routing_key is not None: # Empty exchange indicates the default exchange, meaning messages are # routed to the queue with the same name as the routing key. - span.set_data(SPANDATA.MESSAGING_DESTINATION_NAME, routing_key) + span.set_attribute(SPANDATA.MESSAGING_DESTINATION_NAME, routing_key) if retries is not None: - span.set_data(SPANDATA.MESSAGING_MESSAGE_RETRY_COUNT, retries) + span.set_attribute(SPANDATA.MESSAGING_MESSAGE_RETRY_COUNT, retries) with capture_internal_exceptions(): - span.set_data( + span.set_attribute( SPANDATA.MESSAGING_SYSTEM, self.connection.transport.driver_type ) return original_publish(self, *args, **kwargs) Producer.publish = sentry_publish + + +def _prepopulate_attributes(task, args, kwargs): + # type: (Any, list[Any], dict[str, Any]) -> dict[str, str] + attributes = { + "celery.job.task": task.name, + } + + for i, arg in enumerate(args): + with capture_internal_exceptions(): + attributes[f"celery.job.args.{i}"] = str(arg) + + for kwarg, value in kwargs.items(): + with capture_internal_exceptions(): + attributes[f"celery.job.kwargs.{kwarg}"] = str(value) + + return attributes diff --git a/sentry_sdk/integrations/clickhouse_driver.py b/sentry_sdk/integrations/clickhouse_driver.py index 2561bfad04..7c908b7d6d 100644 --- a/sentry_sdk/integrations/clickhouse_driver.py +++ b/sentry_sdk/integrations/clickhouse_driver.py @@ -3,9 +3,13 @@ from sentry_sdk.integrations import _check_minimum_version, Integration, DidNotEnable from sentry_sdk.tracing import Span from sentry_sdk.scope import should_send_default_pii -from sentry_sdk.utils import capture_internal_exceptions, ensure_integration_enabled +from sentry_sdk.utils import ( + _serialize_span_attribute, + capture_internal_exceptions, + ensure_integration_enabled, +) -from typing import TYPE_CHECKING, TypeVar +from typing import TYPE_CHECKING, cast, Any, Dict, TypeVar # Hack to get new Python features working in older versions # without introducing a hard dependency on `typing_extensions` @@ -84,19 +88,23 @@ def _inner(*args: P.args, **kwargs: P.kwargs) -> T: op=OP.DB, 
name=query, origin=ClickhouseDriverIntegration.origin, + only_if_parent=True, ) connection._sentry_span = span # type: ignore[attr-defined] - _set_db_data(span, connection) - - span.set_data("query", query) + data = _get_db_data(connection) + data = cast("dict[str, Any]", data) + data["db.query.text"] = query if query_id: - span.set_data("db.query_id", query_id) + data["db.query_id"] = query_id if params and should_send_default_pii(): - span.set_data("db.params", params) + data["db.params"] = params + + connection._sentry_db_data = data # type: ignore[attr-defined] + _set_on_span(span, data) # run the original code ret = f(*args, **kwargs) @@ -109,20 +117,32 @@ def _inner(*args: P.args, **kwargs: P.kwargs) -> T: def _wrap_end(f: Callable[P, T]) -> Callable[P, T]: def _inner_end(*args: P.args, **kwargs: P.kwargs) -> T: res = f(*args, **kwargs) - instance = args[0] - span = getattr(instance.connection, "_sentry_span", None) # type: ignore[attr-defined] + client = cast("clickhouse_driver.client.Client", args[0]) + connection = client.connection + span = getattr(connection, "_sentry_span", None) if span is not None: + data = getattr(connection, "_sentry_db_data", {}) + if res is not None and should_send_default_pii(): - span.set_data("db.result", res) + data["db.result"] = res + span.set_attribute("db.result", _serialize_span_attribute(res)) with capture_internal_exceptions(): - span.scope.add_breadcrumb( - message=span._data.pop("query"), category="query", data=span._data - ) + query = data.pop("db.query.text", None) + if query: + sentry_sdk.add_breadcrumb( + message=query, category="query", data=data + ) span.finish() + try: + del connection._sentry_db_data + del connection._sentry_span + except AttributeError: + pass + return res return _inner_end @@ -130,28 +150,39 @@ def _inner_end(*args: P.args, **kwargs: P.kwargs) -> T: def _wrap_send_data(f: Callable[P, T]) -> Callable[P, T]: def _inner_send_data(*args: P.args, **kwargs: P.kwargs) -> T: - instance = args[0] # type: clickhouse_driver.client.Client - data = args[2] - span = getattr(instance.connection, "_sentry_span", None) + client = cast("clickhouse_driver.client.Client", args[0]) + connection = client.connection + db_params_data = cast("list[Any]", args[2]) + span = getattr(connection, "_sentry_span", None) if span is not None: - _set_db_data(span, instance.connection) + data = _get_db_data(connection) + _set_on_span(span, data) if should_send_default_pii(): - db_params = span._data.get("db.params", []) - db_params.extend(data) - span.set_data("db.params", db_params) + saved_db_data = getattr( + connection, "_sentry_db_data", {} + ) # type: dict[str, Any] + db_params = saved_db_data.get("db.params") or [] # type: list[Any] + db_params.extend(db_params_data) + saved_db_data["db.params"] = db_params + span.set_attribute("db.params", _serialize_span_attribute(db_params)) return f(*args, **kwargs) return _inner_send_data -def _set_db_data( - span: Span, connection: clickhouse_driver.connection.Connection -) -> None: - span.set_data(SPANDATA.DB_SYSTEM, "clickhouse") - span.set_data(SPANDATA.SERVER_ADDRESS, connection.host) - span.set_data(SPANDATA.SERVER_PORT, connection.port) - span.set_data(SPANDATA.DB_NAME, connection.database) - span.set_data(SPANDATA.DB_USER, connection.user) +def _get_db_data(connection: clickhouse_driver.connection.Connection) -> Dict[str, str]: + return { + SPANDATA.DB_SYSTEM: "clickhouse", + SPANDATA.SERVER_ADDRESS: connection.host, + SPANDATA.SERVER_PORT: connection.port, + SPANDATA.DB_NAME: connection.database, 
+ SPANDATA.DB_USER: connection.user, + } + + +def _set_on_span(span: Span, data: Dict[str, Any]) -> None: + for key, value in data.items(): + span.set_attribute(key, _serialize_span_attribute(value)) diff --git a/sentry_sdk/integrations/cohere.py b/sentry_sdk/integrations/cohere.py index b4c2af91da..a80ccb19b3 100644 --- a/sentry_sdk/integrations/cohere.py +++ b/sentry_sdk/integrations/cohere.py @@ -147,6 +147,7 @@ def new_chat(*args, **kwargs): op=consts.OP.COHERE_CHAT_COMPLETIONS_CREATE, name="cohere.client.Chat", origin=CohereIntegration.origin, + only_if_parent=True, ) span.__enter__() try: @@ -233,6 +234,7 @@ def new_embed(*args, **kwargs): op=consts.OP.COHERE_EMBEDDINGS_CREATE, name="Cohere Embedding Creation", origin=CohereIntegration.origin, + only_if_parent=True, ) as span: if "texts" in kwargs and ( should_send_default_pii() and integration.include_prompts diff --git a/sentry_sdk/integrations/django/__init__.py b/sentry_sdk/integrations/django/__init__.py index ff67b3e39b..e62ba63f70 100644 --- a/sentry_sdk/integrations/django/__init__.py +++ b/sentry_sdk/integrations/django/__init__.py @@ -1,3 +1,4 @@ +import functools import inspect import sys import threading @@ -5,10 +6,9 @@ from importlib import import_module import sentry_sdk -from sentry_sdk.consts import OP, SPANDATA +from sentry_sdk.consts import OP, SPANDATA, SOURCE_FOR_STYLE, TransactionSource from sentry_sdk.scope import add_global_event_processor, should_send_default_pii from sentry_sdk.serializer import add_global_repr_processor -from sentry_sdk.tracing import SOURCE_FOR_STYLE, TransactionSource from sentry_sdk.tracing_utils import add_query_source, record_sql_queries from sentry_sdk.utils import ( AnnotatedValue, @@ -55,6 +55,7 @@ except ImportError: raise DidNotEnable("Django not installed") +from sentry_sdk.integrations.django.caching import patch_caching from sentry_sdk.integrations.django.transactions import LEGACY_RESOLVER from sentry_sdk.integrations.django.templates import ( get_template_frame_from_exception, @@ -64,11 +65,6 @@ from sentry_sdk.integrations.django.signals_handlers import patch_signals from sentry_sdk.integrations.django.views import patch_views -if DJANGO_VERSION[:2] > (1, 8): - from sentry_sdk.integrations.django.caching import patch_caching -else: - patch_caching = None # type: ignore - from typing import TYPE_CHECKING if TYPE_CHECKING: @@ -89,19 +85,6 @@ from sentry_sdk._types import Event, Hint, EventProcessor, NotImplementedType -if DJANGO_VERSION < (1, 10): - - def is_authenticated(request_user): - # type: (Any) -> bool - return request_user.is_authenticated() - -else: - - def is_authenticated(request_user): - # type: (Any) -> bool - return request_user.is_authenticated - - TRANSACTION_STYLE_VALUES = ("function_name", "url") @@ -131,7 +114,7 @@ def __init__( transaction_style="url", # type: str middleware_spans=True, # type: bool signals_spans=True, # type: bool - cache_spans=False, # type: bool + cache_spans=True, # type: bool signals_denylist=None, # type: Optional[list[signals.Signal]] http_methods_to_capture=DEFAULT_HTTP_METHODS_TO_CAPTURE, # type: tuple[str, ...] 
): @@ -321,6 +304,7 @@ def _patch_drf(): else: old_drf_initial = APIView.initial + @functools.wraps(old_drf_initial) def sentry_patched_drf_initial(self, request, *args, **kwargs): # type: (APIView, Any, *Any, **Any) -> Any with capture_internal_exceptions(): @@ -413,11 +397,13 @@ def _set_transaction_name_and_source(scope, transaction_style, request): if hasattr(urlconf, "handler404"): handler = urlconf.handler404 if isinstance(handler, str): - scope.transaction = handler + scope.set_transaction_name(handler) else: - scope.transaction = transaction_from_function( + name = transaction_from_function( getattr(handler, "view_class", handler) ) + if isinstance(name, str): + scope.set_transaction_name(name) except Exception: pass @@ -471,6 +457,7 @@ def _patch_get_response(): old_get_response = BaseHandler.get_response + @functools.wraps(old_get_response) def sentry_patched_get_response(self, request): # type: (Any, WSGIRequest) -> Union[HttpResponse, BaseException] _before_get_response(request) @@ -594,7 +581,7 @@ def _set_user_info(request, event): user = getattr(request, "user", None) - if user is None or not is_authenticated(user): + if user is None or not user.is_authenticated: return try: @@ -621,20 +608,11 @@ def install_sql_hook(): except ImportError: from django.db.backends.util import CursorWrapper - try: - # django 1.6 and 1.7 compatability - from django.db.backends import BaseDatabaseWrapper - except ImportError: - # django 1.8 or later - from django.db.backends.base.base import BaseDatabaseWrapper + from django.db.backends.base.base import BaseDatabaseWrapper - try: - real_execute = CursorWrapper.execute - real_executemany = CursorWrapper.executemany - real_connect = BaseDatabaseWrapper.connect - except AttributeError: - # This won't work on Django versions < 1.6 - return + real_execute = CursorWrapper.execute + real_executemany = CursorWrapper.executemany + real_connect = BaseDatabaseWrapper.connect @ensure_integration_enabled(DjangoIntegration, real_execute) def execute(self, sql, params=None): @@ -650,8 +628,8 @@ def execute(self, sql, params=None): _set_db_data(span, self) result = real_execute(self, sql, params) - with capture_internal_exceptions(): - add_query_source(span) + with capture_internal_exceptions(): + add_query_source(span) return result @@ -670,8 +648,8 @@ def executemany(self, sql, param_list): result = real_executemany(self, sql, param_list) - with capture_internal_exceptions(): - add_query_source(span) + with capture_internal_exceptions(): + add_query_source(span) return result @@ -685,6 +663,7 @@ def connect(self): op=OP.DB, name="connect", origin=DjangoIntegration.origin_db, + only_if_parent=True, ) as span: _set_db_data(span, self) return real_connect(self) @@ -699,7 +678,7 @@ def _set_db_data(span, cursor_or_db): # type: (Span, Any) -> None db = cursor_or_db.db if hasattr(cursor_or_db, "db") else cursor_or_db vendor = db.vendor - span.set_data(SPANDATA.DB_SYSTEM, vendor) + span.set_attribute(SPANDATA.DB_SYSTEM, vendor) # Some custom backends override `__getattr__`, making it look like `cursor_or_db` # actually has a `connection` and the `connection` has a `get_dsn_parameters` @@ -732,16 +711,16 @@ def _set_db_data(span, cursor_or_db): db_name = connection_params.get("dbname") or connection_params.get("database") if db_name is not None: - span.set_data(SPANDATA.DB_NAME, db_name) + span.set_attribute(SPANDATA.DB_NAME, db_name) server_address = connection_params.get("host") if server_address is not None: - span.set_data(SPANDATA.SERVER_ADDRESS, server_address) 
+ span.set_attribute(SPANDATA.SERVER_ADDRESS, server_address) server_port = connection_params.get("port") if server_port is not None: - span.set_data(SPANDATA.SERVER_PORT, str(server_port)) + span.set_attribute(SPANDATA.SERVER_PORT, str(server_port)) server_socket_address = connection_params.get("unix_socket") if server_socket_address is not None: - span.set_data(SPANDATA.SERVER_SOCKET_ADDRESS, server_socket_address) + span.set_attribute(SPANDATA.SERVER_SOCKET_ADDRESS, server_socket_address) diff --git a/sentry_sdk/integrations/django/asgi.py b/sentry_sdk/integrations/django/asgi.py index 73a25acc9f..511de34855 100644 --- a/sentry_sdk/integrations/django/asgi.py +++ b/sentry_sdk/integrations/django/asgi.py @@ -88,6 +88,7 @@ def patch_django_asgi_handler_impl(cls): old_app = cls.__call__ + @functools.wraps(old_app) async def sentry_patched_asgi_handler(self, scope, receive, send): # type: (Any, Any, Any, Any) -> Any integration = sentry_sdk.get_client().get_integration(DjangoIntegration) @@ -125,6 +126,7 @@ def patch_get_response_async(cls, _before_get_response): # type: (Any, Any) -> None old_get_response_async = cls.get_response_async + @functools.wraps(old_get_response_async) async def sentry_patched_get_response_async(self, request): # type: (Any, Any) -> Union[HttpResponse, BaseException] _before_get_response(request) @@ -142,6 +144,7 @@ def patch_channels_asgi_handler_impl(cls): if channels.__version__ < "3.0.0": old_app = cls.__call__ + @functools.wraps(old_app) async def sentry_patched_asgi_handler(self, receive, send): # type: (Any, Any, Any) -> Any integration = sentry_sdk.get_client().get_integration(DjangoIntegration) @@ -173,8 +176,8 @@ def wrap_async_view(callback): async def sentry_wrapped_callback(request, *args, **kwargs): # type: (Any, *Any, **Any) -> Any current_scope = sentry_sdk.get_current_scope() - if current_scope.transaction is not None: - current_scope.transaction.update_active_thread() + if current_scope.root_span is not None: + current_scope.root_span.update_active_thread() sentry_scope = sentry_sdk.get_isolation_scope() if sentry_scope.profile is not None: @@ -184,6 +187,7 @@ async def sentry_wrapped_callback(request, *args, **kwargs): op=OP.VIEW_RENDER, name=request.resolver_match.view_name, origin=DjangoIntegration.origin, + only_if_parent=True, ): return await callback(request, *args, **kwargs) diff --git a/sentry_sdk/integrations/django/caching.py b/sentry_sdk/integrations/django/caching.py index 7985611761..65bf2674e1 100644 --- a/sentry_sdk/integrations/django/caching.py +++ b/sentry_sdk/integrations/django/caching.py @@ -54,27 +54,28 @@ def _instrument_call( op=op, name=description, origin=DjangoIntegration.origin, + only_if_parent=True, ) as span: value = original_method(*args, **kwargs) with capture_internal_exceptions(): if address is not None: - span.set_data(SPANDATA.NETWORK_PEER_ADDRESS, address) + span.set_attribute(SPANDATA.NETWORK_PEER_ADDRESS, address) if port is not None: - span.set_data(SPANDATA.NETWORK_PEER_PORT, port) + span.set_attribute(SPANDATA.NETWORK_PEER_PORT, port) key = _get_safe_key(method_name, args, kwargs) if key is not None: - span.set_data(SPANDATA.CACHE_KEY, key) + span.set_attribute(SPANDATA.CACHE_KEY, key) item_size = None if is_get_operation: if value: item_size = len(str(value)) - span.set_data(SPANDATA.CACHE_HIT, True) + span.set_attribute(SPANDATA.CACHE_HIT, True) else: - span.set_data(SPANDATA.CACHE_HIT, False) + span.set_attribute(SPANDATA.CACHE_HIT, False) else: # TODO: We don't handle `get_or_set` which we should 
arg_count = len(args) if arg_count >= 2: @@ -85,7 +86,7 @@ def _instrument_call( item_size = len(str(args[0])) if item_size is not None: - span.set_data(SPANDATA.CACHE_ITEM_SIZE, item_size) + span.set_attribute(SPANDATA.CACHE_ITEM_SIZE, item_size) return value @@ -133,22 +134,10 @@ def _get_address_port(settings): return address, int(port) if port is not None else None -def should_enable_cache_spans(): - # type: () -> bool - from sentry_sdk.integrations.django import DjangoIntegration - - client = sentry_sdk.get_client() - integration = client.get_integration(DjangoIntegration) - from django.conf import settings - - return integration is not None and ( - (client.spotlight is not None and settings.DEBUG is True) - or integration.cache_spans is True - ) - - def patch_caching(): # type: () -> None + from sentry_sdk.integrations.django import DjangoIntegration + if not hasattr(CacheHandler, "_sentry_patched"): if DJANGO_VERSION < (3, 2): original_get_item = CacheHandler.__getitem__ @@ -158,7 +147,8 @@ def sentry_get_item(self, alias): # type: (CacheHandler, str) -> Any cache = original_get_item(self, alias) - if should_enable_cache_spans(): + integration = sentry_sdk.get_client().get_integration(DjangoIntegration) + if integration is not None and integration.cache_spans: from django.conf import settings address, port = _get_address_port( @@ -180,7 +170,8 @@ def sentry_create_connection(self, alias): # type: (CacheHandler, str) -> Any cache = original_create_connection(self, alias) - if should_enable_cache_spans(): + integration = sentry_sdk.get_client().get_integration(DjangoIntegration) + if integration is not None and integration.cache_spans: address, port = _get_address_port(self.settings[alias or "default"]) _patch_cache(cache, address, port) diff --git a/sentry_sdk/integrations/django/middleware.py b/sentry_sdk/integrations/django/middleware.py index 245276566e..6640ac2919 100644 --- a/sentry_sdk/integrations/django/middleware.py +++ b/sentry_sdk/integrations/django/middleware.py @@ -89,6 +89,7 @@ def _check_middleware_span(old_method): op=OP.MIDDLEWARE_DJANGO, name=description, origin=DjangoIntegration.origin, + only_if_parent=True, ) middleware_span.set_tag("django.function_name", function_name) middleware_span.set_tag("django.middleware_name", middleware_name) diff --git a/sentry_sdk/integrations/django/signals_handlers.py b/sentry_sdk/integrations/django/signals_handlers.py index cb0f8b9d2e..6e398ddfc3 100644 --- a/sentry_sdk/integrations/django/signals_handlers.py +++ b/sentry_sdk/integrations/django/signals_handlers.py @@ -50,6 +50,7 @@ def patch_signals(): old_live_receivers = Signal._live_receivers + @wraps(old_live_receivers) def _sentry_live_receivers(self, sender): # type: (Signal, Any) -> Union[tuple[list[Callable[..., Any]], list[Callable[..., Any]]], list[Callable[..., Any]]] if DJANGO_VERSION >= (5, 0): @@ -68,8 +69,9 @@ def wrapper(*args, **kwargs): op=OP.EVENT_DJANGO, name=signal_name, origin=DjangoIntegration.origin, + only_if_parent=True, ) as span: - span.set_data("signal", signal_name) + span.set_attribute("signal", signal_name) return receiver(*args, **kwargs) return wrapper diff --git a/sentry_sdk/integrations/django/templates.py b/sentry_sdk/integrations/django/templates.py index 10e8a924b7..fd6e56b515 100644 --- a/sentry_sdk/integrations/django/templates.py +++ b/sentry_sdk/integrations/django/templates.py @@ -1,8 +1,8 @@ import functools from django.template import TemplateSyntaxError +from django.template.base import Origin from django.utils.safestring import 
mark_safe -from django import VERSION as DJANGO_VERSION import sentry_sdk from sentry_sdk.consts import OP @@ -17,13 +17,6 @@ from typing import Iterator from typing import Tuple -try: - # support Django 1.9 - from django.template.base import Origin -except ImportError: - # backward compatibility - from django.template.loader import LoaderOrigin as Origin - def get_template_frame_from_exception(exc_value): # type: (Optional[BaseException]) -> Optional[Dict[str, Any]] @@ -72,14 +65,15 @@ def rendered_content(self): op=OP.TEMPLATE_RENDER, name=_get_template_name_description(self.template_name), origin=DjangoIntegration.origin, + only_if_parent=True, ) as span: - span.set_data("context", self.context_data) + if isinstance(self.context_data, dict): + for k, v in self.context_data.items(): + span.set_attribute(f"context.{k}", v) return real_rendered_content.fget(self) SimpleTemplateResponse.rendered_content = rendered_content - if DJANGO_VERSION < (1, 7): - return import django.shortcuts real_render = django.shortcuts.render @@ -100,8 +94,10 @@ def render(request, template_name, context=None, *args, **kwargs): op=OP.TEMPLATE_RENDER, name=_get_template_name_description(template_name), origin=DjangoIntegration.origin, + only_if_parent=True, ) as span: - span.set_data("context", context) + for k, v in context.items(): + span.set_attribute(f"context.{k}", v) return real_render(request, template_name, context, *args, **kwargs) django.shortcuts.render = render diff --git a/sentry_sdk/integrations/django/transactions.py b/sentry_sdk/integrations/django/transactions.py index 5a7d69f3c9..78b972bc37 100644 --- a/sentry_sdk/integrations/django/transactions.py +++ b/sentry_sdk/integrations/django/transactions.py @@ -19,12 +19,7 @@ from typing import Union from re import Pattern -from django import VERSION as DJANGO_VERSION - -if DJANGO_VERSION >= (2, 0): - from django.urls.resolvers import RoutePattern -else: - RoutePattern = None +from django.urls.resolvers import RoutePattern try: from django.urls import get_resolver diff --git a/sentry_sdk/integrations/django/views.py b/sentry_sdk/integrations/django/views.py index 0a9861a6a6..6240ac6bbb 100644 --- a/sentry_sdk/integrations/django/views.py +++ b/sentry_sdk/integrations/django/views.py @@ -31,12 +31,14 @@ def patch_views(): old_make_view_atomic = BaseHandler.make_view_atomic old_render = SimpleTemplateResponse.render + @functools.wraps(old_render) def sentry_patched_render(self): # type: (SimpleTemplateResponse) -> Any with sentry_sdk.start_span( op=OP.VIEW_RESPONSE_RENDER, name="serialize response", origin=DjangoIntegration.origin, + only_if_parent=True, ): return old_render(self) @@ -77,8 +79,8 @@ def _wrap_sync_view(callback): def sentry_wrapped_callback(request, *args, **kwargs): # type: (Any, *Any, **Any) -> Any current_scope = sentry_sdk.get_current_scope() - if current_scope.transaction is not None: - current_scope.transaction.update_active_thread() + if current_scope.root_span is not None: + current_scope.root_span.update_active_thread() sentry_scope = sentry_sdk.get_isolation_scope() # set the active thread id to the handler thread for sync views @@ -90,6 +92,7 @@ def sentry_wrapped_callback(request, *args, **kwargs): op=OP.VIEW_RENDER, name=request.resolver_match.view_name, origin=DjangoIntegration.origin, + only_if_parent=True, ): return callback(request, *args, **kwargs) diff --git a/sentry_sdk/integrations/falcon.py b/sentry_sdk/integrations/falcon.py index ddedcb10de..9038c01a3f 100644 --- a/sentry_sdk/integrations/falcon.py +++ 
b/sentry_sdk/integrations/falcon.py @@ -1,8 +1,8 @@ import sentry_sdk +from sentry_sdk.consts import SOURCE_FOR_STYLE from sentry_sdk.integrations import _check_minimum_version, Integration, DidNotEnable from sentry_sdk.integrations._wsgi_common import RequestExtractor from sentry_sdk.integrations.wsgi import SentryWsgiMiddleware -from sentry_sdk.tracing import SOURCE_FOR_STYLE from sentry_sdk.utils import ( capture_internal_exceptions, ensure_integration_enabled, @@ -19,8 +19,6 @@ from sentry_sdk._types import Event, EventProcessor -# In Falcon 3.0 `falcon.api_helpers` is renamed to `falcon.app_helpers` -# and `falcon.API` to `falcon.App` try: import falcon # type: ignore @@ -29,24 +27,15 @@ except ImportError: raise DidNotEnable("Falcon not installed") -try: - import falcon.app_helpers # type: ignore - - falcon_helpers = falcon.app_helpers - falcon_app_class = falcon.App - FALCON3 = True -except ImportError: - import falcon.api_helpers # type: ignore +import falcon.app_helpers # type: ignore - falcon_helpers = falcon.api_helpers - falcon_app_class = falcon.API - FALCON3 = False +falcon_helpers = falcon.app_helpers +falcon_app_class = falcon.App _FALCON_UNSET = None # type: Optional[object] -if FALCON3: # falcon.request._UNSET is only available in Falcon 3.0+ - with capture_internal_exceptions(): - from falcon.request import _UNSET as _FALCON_UNSET # type: ignore[import-not-found, no-redef] +with capture_internal_exceptions(): + from falcon.request import _UNSET as _FALCON_UNSET # type: ignore[import-not-found, no-redef] class FalconRequestExtractor(RequestExtractor): @@ -232,14 +221,7 @@ def _exception_leads_to_http_5xx(ex, response): ex, (falcon.HTTPError, falcon.http_status.HTTPStatus) ) - # We only check the HTTP status on Falcon 3 because in Falcon 2, the status on the response - # at the stage where we capture it is listed as 200, even though we would expect to see a 500 - # status. Since at the time of this change, Falcon 2 is ca. 4 years old, we have decided to - # only perform this check on Falcon 3+, despite the risk that some handled errors might be - # reported to Sentry as unhandled on Falcon 2. 
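With Falcon 2 support gone, the predicate below loses its FALCON3 escape hatch: an error is reported only when it is a server error or unhandled, and the response really carries a 5xx status. A hedged sketch of the simplified check, assuming _has_http_5xx_status inspects the response's status string as its name suggests:

def should_report(is_server_error, is_unhandled_error, response_status):
    # response_status is e.g. "500 Internal Server Error"
    has_5xx = str(response_status).startswith("5")
    return (is_server_error or is_unhandled_error) and has_5xx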
- return (is_server_error or is_unhandled_error) and ( - not FALCON3 or _has_http_5xx_status(response) - ) + return (is_server_error or is_unhandled_error) and _has_http_5xx_status(response) def _has_http_5xx_status(response): diff --git a/sentry_sdk/integrations/fastapi.py b/sentry_sdk/integrations/fastapi.py index 76c6adee0f..0e087e3975 100644 --- a/sentry_sdk/integrations/fastapi.py +++ b/sentry_sdk/integrations/fastapi.py @@ -3,9 +3,9 @@ from functools import wraps import sentry_sdk +from sentry_sdk.consts import SOURCE_FOR_STYLE, TransactionSource from sentry_sdk.integrations import DidNotEnable from sentry_sdk.scope import should_send_default_pii -from sentry_sdk.tracing import SOURCE_FOR_STYLE, TransactionSource from sentry_sdk.utils import ( transaction_from_function, logger, @@ -89,8 +89,8 @@ def _sentry_get_request_handler(*args, **kwargs): def _sentry_call(*args, **kwargs): # type: (*Any, **Any) -> Any current_scope = sentry_sdk.get_current_scope() - if current_scope.transaction is not None: - current_scope.transaction.update_active_thread() + if current_scope.root_span is not None: + current_scope.root_span.update_active_thread() sentry_scope = sentry_sdk.get_isolation_scope() if sentry_scope.profile is not None: diff --git a/sentry_sdk/integrations/flask.py b/sentry_sdk/integrations/flask.py index f45ec6db20..9223eacd24 100644 --- a/sentry_sdk/integrations/flask.py +++ b/sentry_sdk/integrations/flask.py @@ -1,4 +1,5 @@ import sentry_sdk +from sentry_sdk.consts import SOURCE_FOR_STYLE from sentry_sdk.integrations import _check_minimum_version, DidNotEnable, Integration from sentry_sdk.integrations._wsgi_common import ( DEFAULT_HTTP_METHODS_TO_CAPTURE, @@ -6,7 +7,6 @@ ) from sentry_sdk.integrations.wsgi import SentryWsgiMiddleware from sentry_sdk.scope import should_send_default_pii -from sentry_sdk.tracing import SOURCE_FOR_STYLE from sentry_sdk.utils import ( capture_internal_exceptions, ensure_integration_enabled, diff --git a/sentry_sdk/integrations/gcp.py b/sentry_sdk/integrations/gcp.py index c637b7414a..97b72ff1ce 100644 --- a/sentry_sdk/integrations/gcp.py +++ b/sentry_sdk/integrations/gcp.py @@ -5,10 +5,12 @@ from os import environ import sentry_sdk -from sentry_sdk.api import continue_trace from sentry_sdk.consts import OP from sentry_sdk.integrations import Integration -from sentry_sdk.integrations._wsgi_common import _filter_headers +from sentry_sdk.integrations._wsgi_common import ( + _filter_headers, + _request_headers_to_span_attributes, +) from sentry_sdk.scope import should_send_default_pii from sentry_sdk.tracing import TransactionSource from sentry_sdk.utils import ( @@ -84,42 +86,30 @@ def sentry_func(functionhandler, gcp_event, *args, **kwargs): if hasattr(gcp_event, "headers"): headers = gcp_event.headers - transaction = continue_trace( - headers, - op=OP.FUNCTION_GCP, - name=environ.get("FUNCTION_NAME", ""), - source=TransactionSource.COMPONENT, - origin=GcpIntegration.origin, - ) - sampling_context = { - "gcp_env": { - "function_name": environ.get("FUNCTION_NAME"), - "function_entry_point": environ.get("ENTRY_POINT"), - "function_identity": environ.get("FUNCTION_IDENTITY"), - "function_region": environ.get("FUNCTION_REGION"), - "function_project": environ.get("GCP_PROJECT"), - }, - "gcp_event": gcp_event, - } - with sentry_sdk.start_transaction( - transaction, custom_sampling_context=sampling_context - ): - try: - return func(functionhandler, gcp_event, *args, **kwargs) - except Exception: - exc_info = sys.exc_info() - sentry_event, hint = 
event_from_exception( - exc_info, - client_options=client.options, - mechanism={"type": "gcp", "handled": False}, - ) - sentry_sdk.capture_event(sentry_event, hint=hint) - reraise(*exc_info) - finally: - if timeout_thread: - timeout_thread.stop() - # Flush out the event queue - client.flush() + with sentry_sdk.continue_trace(headers): + with sentry_sdk.start_span( + op=OP.FUNCTION_GCP, + name=environ.get("FUNCTION_NAME", ""), + source=TransactionSource.COMPONENT, + origin=GcpIntegration.origin, + attributes=_prepopulate_attributes(gcp_event), + ): + try: + return func(functionhandler, gcp_event, *args, **kwargs) + except Exception: + exc_info = sys.exc_info() + sentry_event, hint = event_from_exception( + exc_info, + client_options=client.options, + mechanism={"type": "gcp", "handled": False}, + ) + sentry_sdk.capture_event(sentry_event, hint=hint) + reraise(*exc_info) + finally: + if timeout_thread: + timeout_thread.stop() + # Flush out the event queue + client.flush() return sentry_func # type: ignore @@ -232,3 +222,38 @@ def _get_google_cloud_logs_url(final_time): ) return url + + +ENV_TO_ATTRIBUTE = { + "FUNCTION_NAME": "faas.name", + "ENTRY_POINT": "gcp.function.entry_point", + "FUNCTION_IDENTITY": "gcp.function.identity", + "FUNCTION_REGION": "faas.region", + "GCP_PROJECT": "gcp.function.project", +} + +EVENT_TO_ATTRIBUTE = { + "method": "http.request.method", + "query_string": "url.query", +} + + +def _prepopulate_attributes(gcp_event): + # type: (Any) -> dict[str, Any] + attributes = { + "cloud.provider": "gcp", + } + + for key, attr in ENV_TO_ATTRIBUTE.items(): + if environ.get(key): + attributes[attr] = environ[key] + + for key, attr in EVENT_TO_ATTRIBUTE.items(): + if getattr(gcp_event, key, None): + attributes[attr] = getattr(gcp_event, key) + + if hasattr(gcp_event, "headers"): + headers = gcp_event.headers + attributes.update(_request_headers_to_span_attributes(headers)) + + return attributes diff --git a/sentry_sdk/integrations/graphene.py b/sentry_sdk/integrations/graphene.py index 00a8d155d4..9269a4403c 100644 --- a/sentry_sdk/integrations/graphene.py +++ b/sentry_sdk/integrations/graphene.py @@ -135,17 +135,10 @@ def graphql_span(schema, source, kwargs): }, ) - scope = sentry_sdk.get_current_scope() - if scope.span: - _graphql_span = scope.span.start_child(op=op, name=operation_name) - else: - _graphql_span = sentry_sdk.start_span(op=op, name=operation_name) - - _graphql_span.set_data("graphql.document", source) - _graphql_span.set_data("graphql.operation.name", operation_name) - _graphql_span.set_data("graphql.operation.type", operation_type) - - try: + with sentry_sdk.start_span( + op=op, name=operation_name, only_if_parent=True + ) as graphql_span: + graphql_span.set_attribute("graphql.document", source) + graphql_span.set_attribute("graphql.operation.name", operation_name) + graphql_span.set_attribute("graphql.operation.type", operation_type) yield - finally: - _graphql_span.finish() diff --git a/sentry_sdk/integrations/grpc/aio/client.py b/sentry_sdk/integrations/grpc/aio/client.py index ff3c213176..a8ea94276f 100644 --- a/sentry_sdk/integrations/grpc/aio/client.py +++ b/sentry_sdk/integrations/grpc/aio/client.py @@ -44,14 +44,17 @@ async def intercept_unary_unary( request: Message, ) -> Union[UnaryUnaryCall, Message]: method = client_call_details.method + if isinstance(method, bytes): + method = method.decode() with sentry_sdk.start_span( op=OP.GRPC_CLIENT, - name="unary unary call to %s" % method.decode(), + name="unary unary call to %s" % method, 
origin=SPAN_ORIGIN, + only_if_parent=True, ) as span: - span.set_data("type", "unary unary") - span.set_data("method", method) + span.set_attribute("type", "unary unary") + span.set_attribute("method", method) client_call_details = self._update_client_call_details_metadata_from_scope( client_call_details @@ -59,7 +62,7 @@ async def intercept_unary_unary( response = await continuation(client_call_details, request) status_code = await response.code() - span.set_data("code", status_code.name) + span.set_attribute("code", status_code.name) return response @@ -74,14 +77,17 @@ async def intercept_unary_stream( request: Message, ) -> Union[AsyncIterable[Any], UnaryStreamCall]: method = client_call_details.method + if isinstance(method, bytes): + method = method.decode() with sentry_sdk.start_span( op=OP.GRPC_CLIENT, - name="unary stream call to %s" % method.decode(), + name="unary stream call to %s" % method, origin=SPAN_ORIGIN, + only_if_parent=True, ) as span: - span.set_data("type", "unary stream") - span.set_data("method", method) + span.set_attribute("type", "unary stream") + span.set_attribute("method", method) client_call_details = self._update_client_call_details_metadata_from_scope( client_call_details @@ -89,6 +95,6 @@ async def intercept_unary_stream( response = await continuation(client_call_details, request) # status_code = await response.code() - # span.set_data("code", status_code) + # span.set_attribute("code", status_code) return response diff --git a/sentry_sdk/integrations/grpc/aio/server.py b/sentry_sdk/integrations/grpc/aio/server.py index 381c63103e..91c2e9d74f 100644 --- a/sentry_sdk/integrations/grpc/aio/server.py +++ b/sentry_sdk/integrations/grpc/aio/server.py @@ -2,7 +2,7 @@ from sentry_sdk.consts import OP from sentry_sdk.integrations import DidNotEnable from sentry_sdk.integrations.grpc.consts import SPAN_ORIGIN -from sentry_sdk.tracing import Transaction, TransactionSource +from sentry_sdk.tracing import TransactionSource from sentry_sdk.utils import event_from_exception from typing import TYPE_CHECKING @@ -44,26 +44,24 @@ async def wrapped(request, context): return await handler(request, context) # What if the headers are empty? 
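The server-side rewrite that follows replaces the old Transaction.continue_from_headers / start_transaction pair with the new scoped API: continue_trace(headers) installs the incoming trace context, and start_span inside it opens the root span. The shape of the new pattern, with illustrative op/name values:

import sentry_sdk

def handle_rpc(metadata, handler, request, context):
    # metadata: dict of incoming headers carrying sentry-trace / baggage
    with sentry_sdk.continue_trace(metadata):
        with sentry_sdk.start_span(op="grpc.server", name="MyService/MyMethod"):
            return handler(request, context)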
- transaction = Transaction.continue_from_headers( - dict(context.invocation_metadata()), - op=OP.GRPC_SERVER, - name=name, - source=TransactionSource.CUSTOM, - origin=SPAN_ORIGIN, - ) - - with sentry_sdk.start_transaction(transaction=transaction): - try: - return await handler.unary_unary(request, context) - except AbortError: - raise - except Exception as exc: - event, hint = event_from_exception( - exc, - mechanism={"type": "grpc", "handled": False}, - ) - sentry_sdk.capture_event(event, hint=hint) - raise + with sentry_sdk.continue_trace(dict(context.invocation_metadata())): + with sentry_sdk.start_span( + op=OP.GRPC_SERVER, + name=name, + source=TransactionSource.CUSTOM, + origin=SPAN_ORIGIN, + ): + try: + return await handler.unary_unary(request, context) + except AbortError: + raise + except Exception as exc: + event, hint = event_from_exception( + exc, + mechanism={"type": "grpc", "handled": False}, + ) + sentry_sdk.capture_event(event, hint=hint) + raise elif not handler.request_streaming and handler.response_streaming: handler_factory = grpc.unary_stream_rpc_method_handler diff --git a/sentry_sdk/integrations/grpc/client.py b/sentry_sdk/integrations/grpc/client.py index a5b4f9f52e..b7a1ddd85e 100644 --- a/sentry_sdk/integrations/grpc/client.py +++ b/sentry_sdk/integrations/grpc/client.py @@ -31,16 +31,17 @@ def intercept_unary_unary(self, continuation, client_call_details, request): op=OP.GRPC_CLIENT, name="unary unary call to %s" % method, origin=SPAN_ORIGIN, + only_if_parent=True, ) as span: - span.set_data("type", "unary unary") - span.set_data("method", method) + span.set_attribute("type", "unary unary") + span.set_attribute("method", method) client_call_details = self._update_client_call_details_metadata_from_scope( client_call_details ) response = continuation(client_call_details, request) - span.set_data("code", response.code().name) + span.set_attribute("code", response.code().name) return response @@ -52,9 +53,10 @@ def intercept_unary_stream(self, continuation, client_call_details, request): op=OP.GRPC_CLIENT, name="unary stream call to %s" % method, origin=SPAN_ORIGIN, + only_if_parent=True, ) as span: - span.set_data("type", "unary stream") - span.set_data("method", method) + span.set_attribute("type", "unary stream") + span.set_attribute("method", method) client_call_details = self._update_client_call_details_metadata_from_scope( client_call_details @@ -64,7 +66,7 @@ def intercept_unary_stream(self, continuation, client_call_details, request): client_call_details, request ) # type: UnaryStreamCall # Setting code on unary-stream leads to execution getting stuck - # span.set_data("code", response.code().name) + # span.set_attribute("code", response.code().name) return response diff --git a/sentry_sdk/integrations/grpc/server.py b/sentry_sdk/integrations/grpc/server.py index 0d2792d1b7..582ef6e24a 100644 --- a/sentry_sdk/integrations/grpc/server.py +++ b/sentry_sdk/integrations/grpc/server.py @@ -2,7 +2,7 @@ from sentry_sdk.consts import OP from sentry_sdk.integrations import DidNotEnable from sentry_sdk.integrations.grpc.consts import SPAN_ORIGIN -from sentry_sdk.tracing import Transaction, TransactionSource +from sentry_sdk.tracing import TransactionSource from typing import TYPE_CHECKING @@ -38,19 +38,17 @@ def behavior(request, context): if name: metadata = dict(context.invocation_metadata()) - transaction = Transaction.continue_from_headers( - metadata, - op=OP.GRPC_SERVER, - name=name, - source=TransactionSource.CUSTOM, - origin=SPAN_ORIGIN, - ) - - with 
sentry_sdk.start_transaction(transaction=transaction): - try: - return handler.unary_unary(request, context) - except BaseException as e: - raise e + with sentry_sdk.continue_trace(metadata): + with sentry_sdk.start_span( + op=OP.GRPC_SERVER, + name=name, + source=TransactionSource.CUSTOM, + origin=SPAN_ORIGIN, + ): + try: + return handler.unary_unary(request, context) + except BaseException as e: + raise e else: return handler.unary_unary(request, context) diff --git a/sentry_sdk/integrations/httpx.py b/sentry_sdk/integrations/httpx.py index 2ddd44489f..988778acd0 100644 --- a/sentry_sdk/integrations/httpx.py +++ b/sentry_sdk/integrations/httpx.py @@ -1,12 +1,12 @@ import sentry_sdk -from sentry_sdk.consts import OP, SPANDATA +from sentry_sdk.consts import OP, SPANDATA, BAGGAGE_HEADER_NAME from sentry_sdk.integrations import Integration, DidNotEnable -from sentry_sdk.tracing import BAGGAGE_HEADER_NAME from sentry_sdk.tracing_utils import Baggage, should_propagate_trace from sentry_sdk.utils import ( SENSITIVE_DATA_SUBSTITUTE, capture_internal_exceptions, ensure_integration_enabled, + http_client_status_to_breadcrumb_level, logger, parse_url, ) @@ -60,12 +60,18 @@ def send(self, request, **kwargs): parsed_url.url if parsed_url else SENSITIVE_DATA_SUBSTITUTE, ), origin=HttpxIntegration.origin, + only_if_parent=True, ) as span: - span.set_data(SPANDATA.HTTP_METHOD, request.method) + data = { + SPANDATA.HTTP_METHOD: request.method, + } if parsed_url is not None: - span.set_data("url", parsed_url.url) - span.set_data(SPANDATA.HTTP_QUERY, parsed_url.query) - span.set_data(SPANDATA.HTTP_FRAGMENT, parsed_url.fragment) + data["url"] = parsed_url.url + data[SPANDATA.HTTP_QUERY] = parsed_url.query + data[SPANDATA.HTTP_FRAGMENT] = parsed_url.fragment + + for key, value in data.items(): + span.set_attribute(key, value) if should_propagate_trace(sentry_sdk.get_client(), str(request.url)): for ( @@ -86,7 +92,17 @@ def send(self, request, **kwargs): rv = real_send(self, request, **kwargs) span.set_http_status(rv.status_code) - span.set_data("reason", rv.reason_phrase) + span.set_attribute("reason", rv.reason_phrase) + + data[SPANDATA.HTTP_STATUS_CODE] = rv.status_code + data["reason"] = rv.reason_phrase + + sentry_sdk.add_breadcrumb( + type="http", + category="httplib", + data=data, + level=http_client_status_to_breadcrumb_level(rv.status_code), + ) return rv @@ -114,12 +130,18 @@ async def send(self, request, **kwargs): parsed_url.url if parsed_url else SENSITIVE_DATA_SUBSTITUTE, ), origin=HttpxIntegration.origin, + only_if_parent=True, ) as span: - span.set_data(SPANDATA.HTTP_METHOD, request.method) + data = { + SPANDATA.HTTP_METHOD: request.method, + } if parsed_url is not None: - span.set_data("url", parsed_url.url) - span.set_data(SPANDATA.HTTP_QUERY, parsed_url.query) - span.set_data(SPANDATA.HTTP_FRAGMENT, parsed_url.fragment) + data["url"] = parsed_url.url + data[SPANDATA.HTTP_QUERY] = parsed_url.query + data[SPANDATA.HTTP_FRAGMENT] = parsed_url.fragment + + for key, value in data.items(): + span.set_attribute(key, value) if should_propagate_trace(sentry_sdk.get_client(), str(request.url)): for ( @@ -142,7 +164,17 @@ async def send(self, request, **kwargs): rv = await real_send(self, request, **kwargs) span.set_http_status(rv.status_code) - span.set_data("reason", rv.reason_phrase) + span.set_attribute("reason", rv.reason_phrase) + + data[SPANDATA.HTTP_STATUS_CODE] = rv.status_code + data["reason"] = rv.reason_phrase + + sentry_sdk.add_breadcrumb( + type="http", + category="httplib", + data=data, 
+ level=http_client_status_to_breadcrumb_level(rv.status_code), + ) return rv diff --git a/sentry_sdk/integrations/huey.py b/sentry_sdk/integrations/huey.py index f0aff4c0dd..1d1c498843 100644 --- a/sentry_sdk/integrations/huey.py +++ b/sentry_sdk/integrations/huey.py @@ -2,15 +2,16 @@ from datetime import datetime import sentry_sdk -from sentry_sdk.api import continue_trace, get_baggage, get_traceparent -from sentry_sdk.consts import OP, SPANSTATUS -from sentry_sdk.integrations import DidNotEnable, Integration -from sentry_sdk.scope import should_send_default_pii -from sentry_sdk.tracing import ( +from sentry_sdk.api import get_baggage, get_traceparent +from sentry_sdk.consts import ( + OP, + SPANSTATUS, BAGGAGE_HEADER_NAME, SENTRY_TRACE_HEADER_NAME, TransactionSource, ) +from sentry_sdk.integrations import DidNotEnable, Integration +from sentry_sdk.scope import should_send_default_pii from sentry_sdk.utils import ( capture_internal_exceptions, ensure_integration_enabled, @@ -61,6 +62,7 @@ def _sentry_enqueue(self, task): op=OP.QUEUE_SUBMIT_HUEY, name=task.name, origin=HueyIntegration.origin, + only_if_parent=True, ): if not isinstance(task, PeriodicTask): # Attach trace propagation data to task kwargs. We do @@ -109,11 +111,13 @@ def _capture_exception(exc_info): # type: (ExcInfo) -> None scope = sentry_sdk.get_current_scope() - if exc_info[0] in HUEY_CONTROL_FLOW_EXCEPTIONS: - scope.transaction.set_status(SPANSTATUS.ABORTED) - return + if scope.root_span is not None: + if exc_info[0] in HUEY_CONTROL_FLOW_EXCEPTIONS: + scope.root_span.set_status(SPANSTATUS.ABORTED) + return + + scope.root_span.set_status(SPANSTATUS.INTERNAL_ERROR) - scope.transaction.set_status(SPANSTATUS.INTERNAL_ERROR) event, hint = event_from_exception( exc_info, client_options=sentry_sdk.get_client().options, @@ -135,6 +139,10 @@ def _sentry_execute(*args, **kwargs): _capture_exception(exc_info) reraise(*exc_info) + root_span = sentry_sdk.get_current_scope().root_span + if root_span is not None: + root_span.set_status(SPANSTATUS.OK) + return result return _sentry_execute # type: ignore @@ -153,22 +161,18 @@ def _sentry_execute(self, task, timestamp=None): scope.clear_breadcrumbs() scope.add_event_processor(_make_event_processor(task)) - sentry_headers = task.kwargs.pop("sentry_headers", None) - - transaction = continue_trace( - sentry_headers or {}, - name=task.name, - op=OP.QUEUE_TASK_HUEY, - source=TransactionSource.TASK, - origin=HueyIntegration.origin, - ) - transaction.set_status(SPANSTATUS.OK) - if not getattr(task, "_sentry_is_patched", False): task.execute = _wrap_task_execute(task.execute) task._sentry_is_patched = True - with sentry_sdk.start_transaction(transaction): - return old_execute(self, task, timestamp) + sentry_headers = task.kwargs.pop("sentry_headers", {}) + with sentry_sdk.continue_trace(sentry_headers): + with sentry_sdk.start_span( + name=task.name, + op=OP.QUEUE_TASK_HUEY, + source=TransactionSource.TASK, + origin=HueyIntegration.origin, + ): + return old_execute(self, task, timestamp) Huey._execute = _sentry_execute diff --git a/sentry_sdk/integrations/huggingface_hub.py b/sentry_sdk/integrations/huggingface_hub.py index d09f6e2163..ae00618995 100644 --- a/sentry_sdk/integrations/huggingface_hub.py +++ b/sentry_sdk/integrations/huggingface_hub.py @@ -77,6 +77,7 @@ def new_text_generation(*args, **kwargs): op=consts.OP.HUGGINGFACE_HUB_CHAT_COMPLETIONS_CREATE, name="Text Generation", origin=HuggingfaceHubIntegration.origin, + only_if_parent=True, ) span.__enter__() try: diff --git 
a/sentry_sdk/integrations/langchain.py b/sentry_sdk/integrations/langchain.py index 431fc46bec..3d40ff1dbc 100644 --- a/sentry_sdk/integrations/langchain.py +++ b/sentry_sdk/integrations/langchain.py @@ -3,7 +3,7 @@ import sentry_sdk from sentry_sdk.ai.monitoring import set_ai_pipeline_name, record_token_usage -from sentry_sdk.consts import OP, SPANDATA +from sentry_sdk.consts import OP, SPANDATA, SPANSTATUS from sentry_sdk.ai.utils import set_data_normalized from sentry_sdk.scope import should_send_default_pii from sentry_sdk.tracing import Span @@ -72,7 +72,6 @@ def setup_once(): class WatchedSpan: - span = None # type: Span num_completion_tokens = 0 # type: int num_prompt_tokens = 0 # type: int no_collect_tokens = False # type: bool @@ -123,8 +122,9 @@ def _handle_error(self, run_id, error): span_data = self.span_map[run_id] if not span_data: return - sentry_sdk.capture_exception(error, span_data.span.scope) - span_data.span.__exit__(None, None, None) + sentry_sdk.capture_exception(error) + span_data.span.set_status(SPANSTATUS.INTERNAL_ERROR) + span_data.span.finish() del self.span_map[run_id] def _normalize_langchain_message(self, message): @@ -136,21 +136,27 @@ def _normalize_langchain_message(self, message): def _create_span(self, run_id, parent_id, **kwargs): # type: (SentryLangchainCallback, UUID, Optional[Any], Any) -> WatchedSpan - watched_span = None # type: Optional[WatchedSpan] - if parent_id: - parent_span = self.span_map.get(parent_id) # type: Optional[WatchedSpan] - if parent_span: - watched_span = WatchedSpan(parent_span.span.start_child(**kwargs)) - parent_span.children.append(watched_span) - if watched_span is None: - watched_span = WatchedSpan(sentry_sdk.start_span(**kwargs)) + parent_watched_span = self.span_map.get(parent_id) if parent_id else None + sentry_span = sentry_sdk.start_span( + parent_span=parent_watched_span.span if parent_watched_span else None, + only_if_parent=True, + **kwargs, + ) + watched_span = WatchedSpan(sentry_span) + if parent_watched_span: + parent_watched_span.children.append(watched_span) if kwargs.get("op", "").startswith("ai.pipeline."): if kwargs.get("name"): set_ai_pipeline_name(kwargs.get("name")) watched_span.is_pipeline = True - watched_span.span.__enter__() + # the same run_id is reused for the pipeline it seems + # so we need to end the older span to avoid orphan spans + existing_span_data = self.span_map.get(run_id) + if existing_span_data is not None: + self._exit_span(existing_span_data, run_id) + self.span_map[run_id] = watched_span self.gc_span_map() return watched_span @@ -161,7 +167,8 @@ def _exit_span(self, span_data, run_id): if span_data.is_pipeline: set_ai_pipeline_name(None) - span_data.span.__exit__(None, None, None) + span_data.span.set_status(SPANSTATUS.OK) + span_data.span.finish() del self.span_map[run_id] def on_llm_start( @@ -222,7 +229,7 @@ def on_chat_model_start(self, serialized, messages, *, run_id, **kwargs): if not model and "anthropic" in all_params.get("_type"): model = "claude-2" if model: - span.set_data(SPANDATA.AI_MODEL_ID, model) + span.set_attribute(SPANDATA.AI_MODEL_ID, model) if should_send_default_pii() and self.include_prompts: set_data_normalized( span, diff --git a/sentry_sdk/integrations/litestar.py b/sentry_sdk/integrations/litestar.py index 5f0b32b04e..66b7040f6d 100644 --- a/sentry_sdk/integrations/litestar.py +++ b/sentry_sdk/integrations/litestar.py @@ -1,6 +1,6 @@ from collections.abc import Set import sentry_sdk -from sentry_sdk.consts import OP +from sentry_sdk.consts import OP, 
TransactionSource, SOURCE_FOR_STYLE from sentry_sdk.integrations import ( _DEFAULT_FAILED_REQUEST_STATUS_CODES, DidNotEnable, @@ -9,7 +9,6 @@ from sentry_sdk.integrations.asgi import SentryAsgiMiddleware from sentry_sdk.integrations.logging import ignore_logger from sentry_sdk.scope import should_send_default_pii -from sentry_sdk.tracing import TransactionSource, SOURCE_FOR_STYLE from sentry_sdk.utils import ( ensure_integration_enabled, event_from_exception, @@ -153,6 +152,7 @@ async def _create_span_call(self, scope, receive, send): op=OP.MIDDLEWARE_LITESTAR, name=middleware_name, origin=LitestarIntegration.origin, + only_if_parent=True, ) as middleware_span: middleware_span.set_tag("litestar.middleware_name", middleware_name) @@ -165,6 +165,7 @@ async def _sentry_receive(*args, **kwargs): op=OP.MIDDLEWARE_LITESTAR_RECEIVE, name=getattr(receive, "__qualname__", str(receive)), origin=LitestarIntegration.origin, + only_if_parent=True, ) as span: span.set_tag("litestar.middleware_name", middleware_name) return await receive(*args, **kwargs) @@ -182,6 +183,7 @@ async def _sentry_send(message): op=OP.MIDDLEWARE_LITESTAR_SEND, name=getattr(send, "__qualname__", str(send)), origin=LitestarIntegration.origin, + only_if_parent=True, ) as span: span.set_tag("litestar.middleware_name", middleware_name) return await send(message) diff --git a/sentry_sdk/integrations/logging.py b/sentry_sdk/integrations/logging.py index 46628bb04b..477139be3a 100644 --- a/sentry_sdk/integrations/logging.py +++ b/sentry_sdk/integrations/logging.py @@ -24,7 +24,7 @@ from typing import Optional DEFAULT_LEVEL = logging.INFO -DEFAULT_EVENT_LEVEL = logging.ERROR +DEFAULT_EVENT_LEVEL = None # None means no events are captured LOGGING_TO_EVENT_LEVEL = { logging.NOTSET: "notset", logging.DEBUG: "debug", @@ -43,7 +43,12 @@ # Note: Ignoring by logger name here is better than mucking with thread-locals. # We do not necessarily know whether thread-locals work 100% correctly in the user's environment. 
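Note the behavioral change a few hunks down: DEFAULT_EVENT_LEVEL becomes None, so log records are no longer promoted to Sentry events unless the user opts in. Presumably the previous behavior can be restored through the integration's existing event_level keyword — that reading is mine, not stated in the patch, and the DSN below is a placeholder:

import logging

import sentry_sdk
from sentry_sdk.integrations.logging import LoggingIntegration

sentry_sdk.init(
    dsn="https://examplePublicKey@o0.ingest.sentry.io/0",  # placeholder DSN
    integrations=[
        # opt back in: ERROR-and-above records become Sentry events again
        LoggingIntegration(event_level=logging.ERROR),
    ],
)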
_IGNORED_LOGGERS = set( - ["sentry_sdk.errors", "urllib3.connectionpool", "urllib3.connection"] + [ + "sentry_sdk.errors", + "urllib3.connectionpool", + "urllib3.connection", + "opentelemetry.*", + ] ) diff --git a/sentry_sdk/integrations/openai.py b/sentry_sdk/integrations/openai.py index 61d335b170..c32a5c0278 100644 --- a/sentry_sdk/integrations/openai.py +++ b/sentry_sdk/integrations/openai.py @@ -139,6 +139,7 @@ def _new_chat_completion_common(f, *args, **kwargs): op=consts.OP.OPENAI_CHAT_COMPLETIONS_CREATE, name="Chat Completion", origin=OpenAIIntegration.origin, + only_if_parent=True, ) span.__enter__() @@ -324,6 +325,7 @@ def _new_embeddings_create_common(f, *args, **kwargs): op=consts.OP.OPENAI_EMBEDDINGS_CREATE, description="OpenAI Embedding Creation", origin=OpenAIIntegration.origin, + only_if_parent=True, ) as span: if "input" in kwargs and ( should_send_default_pii() and integration.include_prompts diff --git a/sentry_sdk/integrations/opentelemetry/__init__.py b/sentry_sdk/integrations/opentelemetry/__init__.py deleted file mode 100644 index 3c4c1a683d..0000000000 --- a/sentry_sdk/integrations/opentelemetry/__init__.py +++ /dev/null @@ -1,7 +0,0 @@ -from sentry_sdk.integrations.opentelemetry.span_processor import SentrySpanProcessor -from sentry_sdk.integrations.opentelemetry.propagator import SentryPropagator - -__all__ = [ - "SentryPropagator", - "SentrySpanProcessor", -] diff --git a/sentry_sdk/integrations/opentelemetry/consts.py b/sentry_sdk/integrations/opentelemetry/consts.py deleted file mode 100644 index ec493449d3..0000000000 --- a/sentry_sdk/integrations/opentelemetry/consts.py +++ /dev/null @@ -1,5 +0,0 @@ -from opentelemetry.context import create_key - - -SENTRY_TRACE_KEY = create_key("sentry-trace") -SENTRY_BAGGAGE_KEY = create_key("sentry-baggage") diff --git a/sentry_sdk/integrations/opentelemetry/integration.py b/sentry_sdk/integrations/opentelemetry/integration.py deleted file mode 100644 index 43e0396c16..0000000000 --- a/sentry_sdk/integrations/opentelemetry/integration.py +++ /dev/null @@ -1,58 +0,0 @@ -""" -IMPORTANT: The contents of this file are part of a proof of concept and as such -are experimental and not suitable for production use. They may be changed or -removed at any time without prior notice. -""" - -from sentry_sdk.integrations import DidNotEnable, Integration -from sentry_sdk.integrations.opentelemetry.propagator import SentryPropagator -from sentry_sdk.integrations.opentelemetry.span_processor import SentrySpanProcessor -from sentry_sdk.utils import logger - -try: - from opentelemetry import trace - from opentelemetry.propagate import set_global_textmap - from opentelemetry.sdk.trace import TracerProvider -except ImportError: - raise DidNotEnable("opentelemetry not installed") - -try: - from opentelemetry.instrumentation.django import DjangoInstrumentor # type: ignore[import-not-found] -except ImportError: - DjangoInstrumentor = None - - -CONFIGURABLE_INSTRUMENTATIONS = { - DjangoInstrumentor: {"is_sql_commentor_enabled": True}, -} - - -class OpenTelemetryIntegration(Integration): - identifier = "opentelemetry" - - @staticmethod - def setup_once(): - # type: () -> None - logger.warning( - "[OTel] Initializing highly experimental OpenTelemetry support. " - "Use at your own risk." 
- ) - - _setup_sentry_tracing() - # _setup_instrumentors() - - logger.debug("[OTel] Finished setting up OpenTelemetry integration") - - -def _setup_sentry_tracing(): - # type: () -> None - provider = TracerProvider() - provider.add_span_processor(SentrySpanProcessor()) - trace.set_tracer_provider(provider) - set_global_textmap(SentryPropagator()) - - -def _setup_instrumentors(): - # type: () -> None - for instrumentor, kwargs in CONFIGURABLE_INSTRUMENTATIONS.items(): - instrumentor().instrument(**kwargs) diff --git a/sentry_sdk/integrations/opentelemetry/span_processor.py b/sentry_sdk/integrations/opentelemetry/span_processor.py deleted file mode 100644 index e00562a509..0000000000 --- a/sentry_sdk/integrations/opentelemetry/span_processor.py +++ /dev/null @@ -1,391 +0,0 @@ -from datetime import datetime, timezone -from time import time -from typing import TYPE_CHECKING, cast - -from opentelemetry.context import get_value -from opentelemetry.sdk.trace import SpanProcessor, ReadableSpan as OTelSpan -from opentelemetry.semconv.trace import SpanAttributes -from opentelemetry.trace import ( - format_span_id, - format_trace_id, - get_current_span, - SpanKind, -) -from opentelemetry.trace.span import ( - INVALID_SPAN_ID, - INVALID_TRACE_ID, -) -from sentry_sdk import get_client, start_transaction -from sentry_sdk.consts import INSTRUMENTER, SPANSTATUS -from sentry_sdk.integrations.opentelemetry.consts import ( - SENTRY_BAGGAGE_KEY, - SENTRY_TRACE_KEY, -) -from sentry_sdk.scope import add_global_event_processor -from sentry_sdk.tracing import Transaction, Span as SentrySpan -from sentry_sdk.utils import Dsn - -from urllib3.util import parse_url as urlparse - -if TYPE_CHECKING: - from typing import Any, Optional, Union - from opentelemetry import context as context_api - from sentry_sdk._types import Event, Hint - -OPEN_TELEMETRY_CONTEXT = "otel" -SPAN_MAX_TIME_OPEN_MINUTES = 10 -SPAN_ORIGIN = "auto.otel" - - -def link_trace_context_to_error_event(event, otel_span_map): - # type: (Event, dict[str, Union[Transaction, SentrySpan]]) -> Event - client = get_client() - - if client.options["instrumenter"] != INSTRUMENTER.OTEL: - return event - - if hasattr(event, "type") and event["type"] == "transaction": - return event - - otel_span = get_current_span() - if not otel_span: - return event - - ctx = otel_span.get_span_context() - - if ctx.trace_id == INVALID_TRACE_ID or ctx.span_id == INVALID_SPAN_ID: - return event - - sentry_span = otel_span_map.get(format_span_id(ctx.span_id), None) - if not sentry_span: - return event - - contexts = event.setdefault("contexts", {}) - contexts.setdefault("trace", {}).update(sentry_span.get_trace_context()) - - return event - - -class SentrySpanProcessor(SpanProcessor): - """ - Converts OTel spans into Sentry spans so they can be sent to the Sentry backend. - """ - - # The mapping from otel span ids to sentry spans - otel_span_map = {} # type: dict[str, Union[Transaction, SentrySpan]] - - # The currently open spans. 
Elements will be discarded after SPAN_MAX_TIME_OPEN_MINUTES - open_spans = {} # type: dict[int, set[str]] - - def __new__(cls): - # type: () -> SentrySpanProcessor - if not hasattr(cls, "instance"): - cls.instance = super().__new__(cls) - - return cls.instance - - def __init__(self): - # type: () -> None - @add_global_event_processor - def global_event_processor(event, hint): - # type: (Event, Hint) -> Event - return link_trace_context_to_error_event(event, self.otel_span_map) - - def _prune_old_spans(self): - # type: (SentrySpanProcessor) -> None - """ - Prune spans that have been open for too long. - """ - current_time_minutes = int(time() / 60) - for span_start_minutes in list( - self.open_spans.keys() - ): # making a list because we change the dict - # prune empty open spans buckets - if self.open_spans[span_start_minutes] == set(): - self.open_spans.pop(span_start_minutes) - - # prune old buckets - elif current_time_minutes - span_start_minutes > SPAN_MAX_TIME_OPEN_MINUTES: - for span_id in self.open_spans.pop(span_start_minutes): - self.otel_span_map.pop(span_id, None) - - def on_start(self, otel_span, parent_context=None): - # type: (OTelSpan, Optional[context_api.Context]) -> None - client = get_client() - - if not client.dsn: - return - - try: - _ = Dsn(client.dsn) - except Exception: - return - - if client.options["instrumenter"] != INSTRUMENTER.OTEL: - return - - if not otel_span.get_span_context().is_valid: - return - - if self._is_sentry_span(otel_span): - return - - trace_data = self._get_trace_data(otel_span, parent_context) - - parent_span_id = trace_data["parent_span_id"] - sentry_parent_span = ( - self.otel_span_map.get(parent_span_id) if parent_span_id else None - ) - - start_timestamp = None - if otel_span.start_time is not None: - start_timestamp = datetime.fromtimestamp( - otel_span.start_time / 1e9, timezone.utc - ) # OTel spans have nanosecond precision - - sentry_span = None - if sentry_parent_span: - sentry_span = sentry_parent_span.start_child( - span_id=trace_data["span_id"], - name=otel_span.name, - start_timestamp=start_timestamp, - instrumenter=INSTRUMENTER.OTEL, - origin=SPAN_ORIGIN, - ) - else: - sentry_span = start_transaction( - name=otel_span.name, - span_id=trace_data["span_id"], - parent_span_id=parent_span_id, - trace_id=trace_data["trace_id"], - baggage=trace_data["baggage"], - start_timestamp=start_timestamp, - instrumenter=INSTRUMENTER.OTEL, - origin=SPAN_ORIGIN, - ) - - self.otel_span_map[trace_data["span_id"]] = sentry_span - - if otel_span.start_time is not None: - span_start_in_minutes = int( - otel_span.start_time / 1e9 / 60 - ) # OTel spans have nanosecond precision - self.open_spans.setdefault(span_start_in_minutes, set()).add( - trace_data["span_id"] - ) - - self._prune_old_spans() - - def on_end(self, otel_span): - # type: (OTelSpan) -> None - client = get_client() - - if client.options["instrumenter"] != INSTRUMENTER.OTEL: - return - - span_context = otel_span.get_span_context() - if not span_context.is_valid: - return - - span_id = format_span_id(span_context.span_id) - sentry_span = self.otel_span_map.pop(span_id, None) - if not sentry_span: - return - - sentry_span.op = otel_span.name - - self._update_span_with_otel_status(sentry_span, otel_span) - - if isinstance(sentry_span, Transaction): - sentry_span.name = otel_span.name - sentry_span.set_context( - OPEN_TELEMETRY_CONTEXT, self._get_otel_context(otel_span) - ) - self._update_transaction_with_otel_data(sentry_span, otel_span) - - else: - 
self._update_span_with_otel_data(sentry_span, otel_span) - - end_timestamp = None - if otel_span.end_time is not None: - end_timestamp = datetime.fromtimestamp( - otel_span.end_time / 1e9, timezone.utc - ) # OTel spans have nanosecond precision - - sentry_span.finish(end_timestamp=end_timestamp) - - if otel_span.start_time is not None: - span_start_in_minutes = int( - otel_span.start_time / 1e9 / 60 - ) # OTel spans have nanosecond precision - self.open_spans.setdefault(span_start_in_minutes, set()).discard(span_id) - - self._prune_old_spans() - - def _is_sentry_span(self, otel_span): - # type: (OTelSpan) -> bool - """ - Break infinite loop: - HTTP requests to Sentry are caught by OTel and send again to Sentry. - """ - otel_span_url = None - if otel_span.attributes is not None: - otel_span_url = otel_span.attributes.get(SpanAttributes.HTTP_URL) - otel_span_url = cast("Optional[str]", otel_span_url) - - dsn_url = None - client = get_client() - if client.dsn: - dsn_url = Dsn(client.dsn).netloc - - if otel_span_url and dsn_url and dsn_url in otel_span_url: - return True - - return False - - def _get_otel_context(self, otel_span): - # type: (OTelSpan) -> dict[str, Any] - """ - Returns the OTel context for Sentry. - See: https://develop.sentry.dev/sdk/performance/opentelemetry/#step-5-add-opentelemetry-context - """ - ctx = {} - - if otel_span.attributes: - ctx["attributes"] = dict(otel_span.attributes) - - if otel_span.resource.attributes: - ctx["resource"] = dict(otel_span.resource.attributes) - - return ctx - - def _get_trace_data(self, otel_span, parent_context): - # type: (OTelSpan, Optional[context_api.Context]) -> dict[str, Any] - """ - Extracts tracing information from one OTel span and its parent OTel context. - """ - trace_data = {} # type: dict[str, Any] - span_context = otel_span.get_span_context() - - span_id = format_span_id(span_context.span_id) - trace_data["span_id"] = span_id - - trace_id = format_trace_id(span_context.trace_id) - trace_data["trace_id"] = trace_id - - parent_span_id = ( - format_span_id(otel_span.parent.span_id) if otel_span.parent else None - ) - trace_data["parent_span_id"] = parent_span_id - - sentry_trace_data = get_value(SENTRY_TRACE_KEY, parent_context) - sentry_trace_data = cast("dict[str, Union[str, bool, None]]", sentry_trace_data) - trace_data["parent_sampled"] = ( - sentry_trace_data["parent_sampled"] if sentry_trace_data else None - ) - - baggage = get_value(SENTRY_BAGGAGE_KEY, parent_context) - trace_data["baggage"] = baggage - - return trace_data - - def _update_span_with_otel_status(self, sentry_span, otel_span): - # type: (SentrySpan, OTelSpan) -> None - """ - Set the Sentry span status from the OTel span - """ - if otel_span.status.is_unset: - return - - if otel_span.status.is_ok: - sentry_span.set_status(SPANSTATUS.OK) - return - - sentry_span.set_status(SPANSTATUS.INTERNAL_ERROR) - - def _update_span_with_otel_data(self, sentry_span, otel_span): - # type: (SentrySpan, OTelSpan) -> None - """ - Convert OTel span data and update the Sentry span with it. - This should eventually happen on the server when ingesting the spans. 
- """ - sentry_span.set_data("otel.kind", otel_span.kind) - - op = otel_span.name - description = otel_span.name - - if otel_span.attributes is not None: - for key, val in otel_span.attributes.items(): - sentry_span.set_data(key, val) - - http_method = otel_span.attributes.get(SpanAttributes.HTTP_METHOD) - http_method = cast("Optional[str]", http_method) - - db_query = otel_span.attributes.get(SpanAttributes.DB_SYSTEM) - - if http_method: - op = "http" - - if otel_span.kind == SpanKind.SERVER: - op += ".server" - elif otel_span.kind == SpanKind.CLIENT: - op += ".client" - - description = http_method - - peer_name = otel_span.attributes.get(SpanAttributes.NET_PEER_NAME, None) - if peer_name: - description += " {}".format(peer_name) - - target = otel_span.attributes.get(SpanAttributes.HTTP_TARGET, None) - if target: - description += " {}".format(target) - - if not peer_name and not target: - url = otel_span.attributes.get(SpanAttributes.HTTP_URL, None) - url = cast("Optional[str]", url) - if url: - parsed_url = urlparse(url) - url = "{}://{}{}".format( - parsed_url.scheme, parsed_url.netloc, parsed_url.path - ) - description += " {}".format(url) - - status_code = otel_span.attributes.get( - SpanAttributes.HTTP_STATUS_CODE, None - ) - status_code = cast("Optional[int]", status_code) - if status_code: - sentry_span.set_http_status(status_code) - - elif db_query: - op = "db" - statement = otel_span.attributes.get(SpanAttributes.DB_STATEMENT, None) - statement = cast("Optional[str]", statement) - if statement: - description = statement - - sentry_span.op = op - sentry_span.description = description - - def _update_transaction_with_otel_data(self, sentry_span, otel_span): - # type: (SentrySpan, OTelSpan) -> None - if otel_span.attributes is None: - return - - http_method = otel_span.attributes.get(SpanAttributes.HTTP_METHOD) - - if http_method: - status_code = otel_span.attributes.get(SpanAttributes.HTTP_STATUS_CODE) - status_code = cast("Optional[int]", status_code) - if status_code: - sentry_span.set_http_status(status_code) - - op = "http" - - if otel_span.kind == SpanKind.SERVER: - op += ".server" - elif otel_span.kind == SpanKind.CLIENT: - op += ".client" - - sentry_span.op = op diff --git a/sentry_sdk/integrations/pymongo.py b/sentry_sdk/integrations/pymongo.py index f65ad73687..32cb294075 100644 --- a/sentry_sdk/integrations/pymongo.py +++ b/sentry_sdk/integrations/pymongo.py @@ -1,12 +1,11 @@ import copy -import json import sentry_sdk from sentry_sdk.consts import SPANSTATUS, SPANDATA, OP from sentry_sdk.integrations import DidNotEnable, Integration from sentry_sdk.scope import should_send_default_pii from sentry_sdk.tracing import Span -from sentry_sdk.utils import capture_internal_exceptions +from sentry_sdk.utils import capture_internal_exceptions, _serialize_span_attribute try: from pymongo import monitoring @@ -127,56 +126,50 @@ def started(self, event): command.pop("$clusterTime", None) command.pop("$signature", None) - tags = { - "db.name": event.database_name, + data = { + SPANDATA.DB_NAME: event.database_name, SPANDATA.DB_SYSTEM: "mongodb", SPANDATA.DB_OPERATION: event.command_name, SPANDATA.DB_MONGODB_COLLECTION: command.get(event.command_name), } try: - tags["net.peer.name"] = event.connection_id[0] - tags["net.peer.port"] = str(event.connection_id[1]) + data["net.peer.name"] = event.connection_id[0] + data["net.peer.port"] = str(event.connection_id[1]) except TypeError: pass - data = {"operation_ids": {}} # type: Dict[str, Any] - data["operation_ids"]["operation"] = 
event.operation_id - data["operation_ids"]["request"] = event.request_id - - data.update(_get_db_data(event)) - try: lsid = command.pop("lsid")["id"] - data["operation_ids"]["session"] = str(lsid) + data["session_id"] = str(lsid) except KeyError: pass if not should_send_default_pii(): command = _strip_pii(command) - query = json.dumps(command, default=str) + query = _serialize_span_attribute(command) span = sentry_sdk.start_span( op=OP.DB, name=query, origin=PyMongoIntegration.origin, + only_if_parent=True, ) - for tag, value in tags.items(): - # set the tag for backwards-compatibility. - # TODO: remove the set_tag call in the next major release! - span.set_tag(tag, value) - - span.set_data(tag, value) - - for key, value in data.items(): - span.set_data(key, value) - with capture_internal_exceptions(): sentry_sdk.add_breadcrumb( - message=query, category="query", type=OP.DB, data=tags + message=query, category="query", type=OP.DB, data=data ) + for key, value in data.items(): + span.set_attribute(key, value) + + for key, value in _get_db_data(event).items(): + span.set_attribute(key, value) + + span.set_attribute("operation_id", event.operation_id) + span.set_attribute("request_id", event.request_id) + self._ongoing_operations[self._operation_key(event)] = span.__enter__() def failed(self, event): diff --git a/sentry_sdk/integrations/pyramid.py b/sentry_sdk/integrations/pyramid.py index d1475ada65..a4d30e38a4 100644 --- a/sentry_sdk/integrations/pyramid.py +++ b/sentry_sdk/integrations/pyramid.py @@ -4,11 +4,11 @@ import weakref import sentry_sdk +from sentry_sdk.consts import SOURCE_FOR_STYLE from sentry_sdk.integrations import Integration, DidNotEnable from sentry_sdk.integrations._wsgi_common import RequestExtractor from sentry_sdk.integrations.wsgi import SentryWsgiMiddleware from sentry_sdk.scope import should_send_default_pii -from sentry_sdk.tracing import SOURCE_FOR_STYLE from sentry_sdk.utils import ( capture_internal_exceptions, ensure_integration_enabled, diff --git a/sentry_sdk/integrations/quart.py b/sentry_sdk/integrations/quart.py index 51306bb4cd..68c1342216 100644 --- a/sentry_sdk/integrations/quart.py +++ b/sentry_sdk/integrations/quart.py @@ -3,11 +3,11 @@ from functools import wraps import sentry_sdk +from sentry_sdk.consts import SOURCE_FOR_STYLE from sentry_sdk.integrations import DidNotEnable, Integration from sentry_sdk.integrations._wsgi_common import _filter_headers from sentry_sdk.integrations.asgi import SentryAsgiMiddleware from sentry_sdk.scope import should_send_default_pii -from sentry_sdk.tracing import SOURCE_FOR_STYLE from sentry_sdk.utils import ( capture_internal_exceptions, ensure_integration_enabled, @@ -122,8 +122,8 @@ def decorator(old_func): def _sentry_func(*args, **kwargs): # type: (*Any, **Any) -> Any current_scope = sentry_sdk.get_current_scope() - if current_scope.transaction is not None: - current_scope.transaction.update_active_thread() + if current_scope.root_span is not None: + current_scope.root_span.update_active_thread() sentry_scope = sentry_sdk.get_isolation_scope() if sentry_scope.profile is not None: diff --git a/sentry_sdk/integrations/ray.py b/sentry_sdk/integrations/ray.py index 0842b92265..a0ec9713c1 100644 --- a/sentry_sdk/integrations/ray.py +++ b/sentry_sdk/integrations/ray.py @@ -26,6 +26,8 @@ from typing import Any, Optional from sentry_sdk.utils import ExcInfo +DEFAULT_TRANSACTION_NAME = "unknown Ray function" + def _check_sentry_initialized(): # type: () -> None @@ -58,25 +60,28 @@ def _f(*f_args, _tracing=None, 
**f_kwargs): """ _check_sentry_initialized() - transaction = sentry_sdk.continue_trace( - _tracing or {}, - op=OP.QUEUE_TASK_RAY, - name=qualname_from_function(f), - origin=RayIntegration.origin, + root_span_name = qualname_from_function(f) or DEFAULT_TRANSACTION_NAME + sentry_sdk.get_current_scope().set_transaction_name( + root_span_name, source=TransactionSource.TASK, ) - - with sentry_sdk.start_transaction(transaction) as transaction: - try: - result = f(*f_args, **f_kwargs) - transaction.set_status(SPANSTATUS.OK) - except Exception: - transaction.set_status(SPANSTATUS.INTERNAL_ERROR) - exc_info = sys.exc_info() - _capture_exception(exc_info) - reraise(*exc_info) - - return result + with sentry_sdk.continue_trace(_tracing or {}): + with sentry_sdk.start_span( + op=OP.QUEUE_TASK_RAY, + name=root_span_name, + origin=RayIntegration.origin, + source=TransactionSource.TASK, + ) as root_span: + try: + result = f(*f_args, **f_kwargs) + root_span.set_status(SPANSTATUS.OK) + except Exception: + root_span.set_status(SPANSTATUS.INTERNAL_ERROR) + exc_info = sys.exc_info() + _capture_exception(exc_info) + reraise(*exc_info) + + return result rv = old_remote(_f, *args, *kwargs) old_remote_method = rv.remote @@ -90,6 +95,7 @@ def _remote_method_with_header_propagation(*args, **kwargs): op=OP.QUEUE_SUBMIT_RAY, name=qualname_from_function(f), origin=RayIntegration.origin, + only_if_parent=True, ) as span: tracing = { k: v diff --git a/sentry_sdk/integrations/redis/_async_common.py b/sentry_sdk/integrations/redis/_async_common.py index 196e85e74b..e62aa1a807 100644 --- a/sentry_sdk/integrations/redis/_async_common.py +++ b/sentry_sdk/integrations/redis/_async_common.py @@ -3,14 +3,15 @@ from sentry_sdk.integrations.redis.consts import SPAN_ORIGIN from sentry_sdk.integrations.redis.modules.caches import ( _compile_cache_span_properties, - _set_cache_data, + _get_cache_data, ) from sentry_sdk.integrations.redis.modules.queries import _compile_db_span_properties from sentry_sdk.integrations.redis.utils import ( - _set_client_data, - _set_pipeline_data, + _create_breadcrumb, + _get_client_data, + _get_pipeline_data, + _update_span, ) -from sentry_sdk.tracing import Span from sentry_sdk.utils import capture_internal_exceptions from typing import TYPE_CHECKING @@ -23,9 +24,9 @@ def patch_redis_async_pipeline( - pipeline_cls, is_cluster, get_command_args_fn, set_db_data_fn + pipeline_cls, is_cluster, get_command_args_fn, get_db_data_fn ): - # type: (Union[type[Pipeline[Any]], type[ClusterPipeline[Any]]], bool, Any, Callable[[Span, Any], None]) -> None + # type: (Union[type[Pipeline[Any]], type[ClusterPipeline[Any]]], bool, Any, Callable[[Any], dict[str, Any]]) -> None old_execute = pipeline_cls.execute from sentry_sdk.integrations.redis import RedisIntegration @@ -39,24 +40,28 @@ async def _sentry_execute(self, *args, **kwargs): op=OP.DB_REDIS, name="redis.pipeline.execute", origin=SPAN_ORIGIN, + only_if_parent=True, ) as span: with capture_internal_exceptions(): - set_db_data_fn(span, self) - _set_pipeline_data( - span, - is_cluster, - get_command_args_fn, - False if is_cluster else self.is_transaction, - self._command_stack if is_cluster else self.command_stack, + span_data = get_db_data_fn(self) + pipeline_data = _get_pipeline_data( + is_cluster=is_cluster, + get_command_args_fn=get_command_args_fn, + is_transaction=False if is_cluster else self.is_transaction, + command_stack=( + self._command_stack if is_cluster else self.command_stack + ), ) + _update_span(span, span_data, pipeline_data) + 
_create_breadcrumb("redis.pipeline.execute", span_data, pipeline_data) return await old_execute(self, *args, **kwargs) pipeline_cls.execute = _sentry_execute # type: ignore -def patch_redis_async_client(cls, is_cluster, set_db_data_fn): - # type: (Union[type[StrictRedis[Any]], type[RedisCluster[Any]]], bool, Callable[[Span, Any], None]) -> None +def patch_redis_async_client(cls, is_cluster, get_db_data_fn): + # type: (Union[type[StrictRedis[Any]], type[RedisCluster[Any]]], bool, Callable[[Any], dict[str, Any]]) -> None old_execute_command = cls.execute_command from sentry_sdk.integrations.redis import RedisIntegration @@ -80,6 +85,7 @@ async def _sentry_execute_command(self, name, *args, **kwargs): op=cache_properties["op"], name=cache_properties["description"], origin=SPAN_ORIGIN, + only_if_parent=True, ) cache_span.__enter__() @@ -89,18 +95,24 @@ async def _sentry_execute_command(self, name, *args, **kwargs): op=db_properties["op"], name=db_properties["description"], origin=SPAN_ORIGIN, + only_if_parent=True, ) db_span.__enter__() - set_db_data_fn(db_span, self) - _set_client_data(db_span, is_cluster, name, *args) + db_span_data = get_db_data_fn(self) + db_client_span_data = _get_client_data(is_cluster, name, *args) + _update_span(db_span, db_span_data, db_client_span_data) + _create_breadcrumb( + db_properties["description"], db_span_data, db_client_span_data + ) value = await old_execute_command(self, name, *args, **kwargs) db_span.__exit__(None, None, None) if cache_span: - _set_cache_data(cache_span, self, cache_properties, value) + cache_span_data = _get_cache_data(self, cache_properties, value) + _update_span(cache_span, cache_span_data) cache_span.__exit__(None, None, None) return value diff --git a/sentry_sdk/integrations/redis/_sync_common.py b/sentry_sdk/integrations/redis/_sync_common.py index ef10e9e4f0..c2509eea9c 100644 --- a/sentry_sdk/integrations/redis/_sync_common.py +++ b/sentry_sdk/integrations/redis/_sync_common.py @@ -3,14 +3,15 @@ from sentry_sdk.integrations.redis.consts import SPAN_ORIGIN from sentry_sdk.integrations.redis.modules.caches import ( _compile_cache_span_properties, - _set_cache_data, + _get_cache_data, ) from sentry_sdk.integrations.redis.modules.queries import _compile_db_span_properties from sentry_sdk.integrations.redis.utils import ( - _set_client_data, - _set_pipeline_data, + _create_breadcrumb, + _get_client_data, + _get_pipeline_data, + _update_span, ) -from sentry_sdk.tracing import Span from sentry_sdk.utils import capture_internal_exceptions from typing import TYPE_CHECKING @@ -24,9 +25,9 @@ def patch_redis_pipeline( pipeline_cls, is_cluster, get_command_args_fn, - set_db_data_fn, + get_db_data_fn, ): - # type: (Any, bool, Any, Callable[[Span, Any], None]) -> None + # type: (Any, bool, Any, Callable[[Any], dict[str, Any]]) -> None old_execute = pipeline_cls.execute from sentry_sdk.integrations.redis import RedisIntegration @@ -40,24 +41,26 @@ def sentry_patched_execute(self, *args, **kwargs): op=OP.DB_REDIS, name="redis.pipeline.execute", origin=SPAN_ORIGIN, + only_if_parent=True, ) as span: with capture_internal_exceptions(): - set_db_data_fn(span, self) - _set_pipeline_data( - span, - is_cluster, - get_command_args_fn, - False if is_cluster else self.transaction, - self.command_stack, + span_data = get_db_data_fn(self) + pipeline_data = _get_pipeline_data( + is_cluster=is_cluster, + get_command_args_fn=get_command_args_fn, + is_transaction=False if is_cluster else self.transaction, + command_stack=self.command_stack, ) + 
_update_span(span, span_data, pipeline_data) + _create_breadcrumb("redis.pipeline.execute", span_data, pipeline_data) return old_execute(self, *args, **kwargs) pipeline_cls.execute = sentry_patched_execute -def patch_redis_client(cls, is_cluster, set_db_data_fn): - # type: (Any, bool, Callable[[Span, Any], None]) -> None +def patch_redis_client(cls, is_cluster, get_db_data_fn): + # type: (Any, bool, Callable[[Any], dict[str, Any]]) -> None """ This function can be used to instrument custom redis client classes or subclasses. @@ -85,6 +88,7 @@ def sentry_patched_execute_command(self, name, *args, **kwargs): op=cache_properties["op"], name=cache_properties["description"], origin=SPAN_ORIGIN, + only_if_parent=True, ) cache_span.__enter__() @@ -94,18 +98,24 @@ def sentry_patched_execute_command(self, name, *args, **kwargs): op=db_properties["op"], name=db_properties["description"], origin=SPAN_ORIGIN, + only_if_parent=True, ) db_span.__enter__() - set_db_data_fn(db_span, self) - _set_client_data(db_span, is_cluster, name, *args) + db_span_data = get_db_data_fn(self) + db_client_span_data = _get_client_data(is_cluster, name, *args) + _update_span(db_span, db_span_data, db_client_span_data) + _create_breadcrumb( + db_properties["description"], db_span_data, db_client_span_data + ) value = old_execute_command(self, name, *args, **kwargs) db_span.__exit__(None, None, None) if cache_span: - _set_cache_data(cache_span, self, cache_properties, value) + cache_span_data = _get_cache_data(self, cache_properties, value) + _update_span(cache_span, cache_span_data) cache_span.__exit__(None, None, None) return value diff --git a/sentry_sdk/integrations/redis/modules/caches.py b/sentry_sdk/integrations/redis/modules/caches.py index c6fc19f5b2..4ab33d2ea8 100644 --- a/sentry_sdk/integrations/redis/modules/caches.py +++ b/sentry_sdk/integrations/redis/modules/caches.py @@ -13,7 +13,6 @@ if TYPE_CHECKING: from sentry_sdk.integrations.redis import RedisIntegration - from sentry_sdk.tracing import Span from typing import Any, Optional @@ -75,22 +74,24 @@ def _get_cache_span_description(redis_command, args, kwargs, integration): return description -def _set_cache_data(span, redis_client, properties, return_value): - # type: (Span, Any, dict[str, Any], Optional[Any]) -> None +def _get_cache_data(redis_client, properties, return_value): + # type: (Any, dict[str, Any], Optional[Any]) -> dict[str, Any] + data = {} + with capture_internal_exceptions(): - span.set_data(SPANDATA.CACHE_KEY, properties["key"]) + data[SPANDATA.CACHE_KEY] = properties["key"] if properties["redis_command"] in GET_COMMANDS: if return_value is not None: - span.set_data(SPANDATA.CACHE_HIT, True) + data[SPANDATA.CACHE_HIT] = True size = ( len(str(return_value).encode("utf-8")) if not isinstance(return_value, bytes) else len(return_value) ) - span.set_data(SPANDATA.CACHE_ITEM_SIZE, size) + data[SPANDATA.CACHE_ITEM_SIZE] = size else: - span.set_data(SPANDATA.CACHE_HIT, False) + data[SPANDATA.CACHE_HIT] = False elif properties["redis_command"] in SET_COMMANDS: if properties["value"] is not None: @@ -99,7 +100,7 @@ def _set_cache_data(span, redis_client, properties, return_value): if not isinstance(properties["value"], bytes) else len(properties["value"]) ) - span.set_data(SPANDATA.CACHE_ITEM_SIZE, size) + data[SPANDATA.CACHE_ITEM_SIZE] = size try: connection_params = redis_client.connection_pool.connection_kwargs @@ -114,8 +115,10 @@ def _set_cache_data(span, redis_client, properties, return_value): host = connection_params.get("host") if host is 
not None: - span.set_data(SPANDATA.NETWORK_PEER_ADDRESS, host) + data[SPANDATA.NETWORK_PEER_ADDRESS] = host port = connection_params.get("port") if port is not None: - span.set_data(SPANDATA.NETWORK_PEER_PORT, port) + data[SPANDATA.NETWORK_PEER_PORT] = port + + return data diff --git a/sentry_sdk/integrations/redis/modules/queries.py b/sentry_sdk/integrations/redis/modules/queries.py index e0d85a4ef7..c070893ac8 100644 --- a/sentry_sdk/integrations/redis/modules/queries.py +++ b/sentry_sdk/integrations/redis/modules/queries.py @@ -11,7 +11,6 @@ if TYPE_CHECKING: from redis import Redis from sentry_sdk.integrations.redis import RedisIntegration - from sentry_sdk.tracing import Span from typing import Any @@ -43,26 +42,30 @@ def _get_db_span_description(integration, command_name, args): return description -def _set_db_data_on_span(span, connection_params): - # type: (Span, dict[str, Any]) -> None - span.set_data(SPANDATA.DB_SYSTEM, "redis") +def _get_connection_data(connection_params): + # type: (dict[str, Any]) -> dict[str, Any] + data = { + SPANDATA.DB_SYSTEM: "redis", + } db = connection_params.get("db") if db is not None: - span.set_data(SPANDATA.DB_NAME, str(db)) + data[SPANDATA.DB_NAME] = str(db) host = connection_params.get("host") if host is not None: - span.set_data(SPANDATA.SERVER_ADDRESS, host) + data[SPANDATA.SERVER_ADDRESS] = host port = connection_params.get("port") if port is not None: - span.set_data(SPANDATA.SERVER_PORT, port) + data[SPANDATA.SERVER_PORT] = port + + return data -def _set_db_data(span, redis_instance): - # type: (Span, Redis[Any]) -> None +def _get_db_data(redis_instance): + # type: (Redis[Any]) -> dict[str, Any] try: - _set_db_data_on_span(span, redis_instance.connection_pool.connection_kwargs) + return _get_connection_data(redis_instance.connection_pool.connection_kwargs) except AttributeError: - pass # connections_kwargs may be missing in some cases + return {} # connections_kwargs may be missing in some cases diff --git a/sentry_sdk/integrations/redis/rb.py b/sentry_sdk/integrations/redis/rb.py index 1b3e2e530c..68d3c3a9d6 100644 --- a/sentry_sdk/integrations/redis/rb.py +++ b/sentry_sdk/integrations/redis/rb.py @@ -5,7 +5,7 @@ """ from sentry_sdk.integrations.redis._sync_common import patch_redis_client -from sentry_sdk.integrations.redis.modules.queries import _set_db_data +from sentry_sdk.integrations.redis.modules.queries import _get_db_data def _patch_rb(): @@ -18,15 +18,15 @@ def _patch_rb(): patch_redis_client( rb.clients.FanoutClient, is_cluster=False, - set_db_data_fn=_set_db_data, + get_db_data_fn=_get_db_data, ) patch_redis_client( rb.clients.MappingClient, is_cluster=False, - set_db_data_fn=_set_db_data, + get_db_data_fn=_get_db_data, ) patch_redis_client( rb.clients.RoutingClient, is_cluster=False, - set_db_data_fn=_set_db_data, + get_db_data_fn=_get_db_data, ) diff --git a/sentry_sdk/integrations/redis/redis.py b/sentry_sdk/integrations/redis/redis.py index c92958a32d..935a828c3d 100644 --- a/sentry_sdk/integrations/redis/redis.py +++ b/sentry_sdk/integrations/redis/redis.py @@ -8,7 +8,7 @@ patch_redis_client, patch_redis_pipeline, ) -from sentry_sdk.integrations.redis.modules.queries import _set_db_data +from sentry_sdk.integrations.redis.modules.queries import _get_db_data from typing import TYPE_CHECKING @@ -26,13 +26,13 @@ def _patch_redis(StrictRedis, client): # noqa: N803 patch_redis_client( StrictRedis, is_cluster=False, - set_db_data_fn=_set_db_data, + get_db_data_fn=_get_db_data, ) patch_redis_pipeline( client.Pipeline, 
is_cluster=False, get_command_args_fn=_get_redis_command_args, - set_db_data_fn=_set_db_data, + get_db_data_fn=_get_db_data, ) try: strict_pipeline = client.StrictPipeline @@ -43,7 +43,7 @@ def _patch_redis(StrictRedis, client): # noqa: N803 strict_pipeline, is_cluster=False, get_command_args_fn=_get_redis_command_args, - set_db_data_fn=_set_db_data, + get_db_data_fn=_get_db_data, ) try: @@ -59,11 +59,11 @@ def _patch_redis(StrictRedis, client): # noqa: N803 patch_redis_async_client( redis.asyncio.client.StrictRedis, is_cluster=False, - set_db_data_fn=_set_db_data, + get_db_data_fn=_get_db_data, ) patch_redis_async_pipeline( redis.asyncio.client.Pipeline, False, _get_redis_command_args, - set_db_data_fn=_set_db_data, + get_db_data_fn=_get_db_data, ) diff --git a/sentry_sdk/integrations/redis/redis_cluster.py b/sentry_sdk/integrations/redis/redis_cluster.py index 80cdc7235a..7975e21083 100644 --- a/sentry_sdk/integrations/redis/redis_cluster.py +++ b/sentry_sdk/integrations/redis/redis_cluster.py @@ -9,7 +9,7 @@ patch_redis_client, patch_redis_pipeline, ) -from sentry_sdk.integrations.redis.modules.queries import _set_db_data_on_span +from sentry_sdk.integrations.redis.modules.queries import _get_connection_data from sentry_sdk.integrations.redis.utils import _parse_rediscluster_command from sentry_sdk.utils import capture_internal_exceptions @@ -23,29 +23,29 @@ RedisCluster as AsyncRedisCluster, ClusterPipeline as AsyncClusterPipeline, ) - from sentry_sdk.tracing import Span -def _set_async_cluster_db_data(span, async_redis_cluster_instance): - # type: (Span, AsyncRedisCluster[Any]) -> None +def _get_async_cluster_db_data(async_redis_cluster_instance): + # type: (AsyncRedisCluster[Any]) -> dict[str, Any] default_node = async_redis_cluster_instance.get_default_node() if default_node is not None and default_node.connection_kwargs is not None: - _set_db_data_on_span(span, default_node.connection_kwargs) + return _get_connection_data(default_node.connection_kwargs) + else: + return {} -def _set_async_cluster_pipeline_db_data(span, async_redis_cluster_pipeline_instance): - # type: (Span, AsyncClusterPipeline[Any]) -> None +def _get_async_cluster_pipeline_db_data(async_redis_cluster_pipeline_instance): + # type: (AsyncClusterPipeline[Any]) -> dict[str, Any] with capture_internal_exceptions(): - _set_async_cluster_db_data( - span, + return _get_async_cluster_db_data( # the AsyncClusterPipeline has always had a `_client` attr but it is private so potentially problematic and mypy # does not recognize it - see https://github.com/redis/redis-py/blame/v5.0.0/redis/asyncio/cluster.py#L1386 async_redis_cluster_pipeline_instance._client, # type: ignore[attr-defined] ) -def _set_cluster_db_data(span, redis_cluster_instance): - # type: (Span, RedisCluster[Any]) -> None +def _get_cluster_db_data(redis_cluster_instance): + # type: (RedisCluster[Any]) -> dict[str, Any] default_node = redis_cluster_instance.get_default_node() if default_node is not None: @@ -53,7 +53,9 @@ def _set_cluster_db_data(span, redis_cluster_instance): "host": default_node.host, "port": default_node.port, } - _set_db_data_on_span(span, connection_params) + return _get_connection_data(connection_params) + else: + return {} def _patch_redis_cluster(): @@ -67,13 +69,13 @@ def _patch_redis_cluster(): patch_redis_client( RedisCluster, is_cluster=True, - set_db_data_fn=_set_cluster_db_data, + get_db_data_fn=_get_cluster_db_data, ) patch_redis_pipeline( cluster.ClusterPipeline, is_cluster=True, get_command_args_fn=_parse_rediscluster_command, 
- set_db_data_fn=_set_cluster_db_data, + get_db_data_fn=_get_cluster_db_data, ) try: @@ -89,11 +91,11 @@ def _patch_redis_cluster(): patch_redis_async_client( async_cluster.RedisCluster, is_cluster=True, - set_db_data_fn=_set_async_cluster_db_data, + get_db_data_fn=_get_async_cluster_db_data, ) patch_redis_async_pipeline( async_cluster.ClusterPipeline, is_cluster=True, get_command_args_fn=_parse_rediscluster_command, - set_db_data_fn=_set_async_cluster_pipeline_db_data, + get_db_data_fn=_get_async_cluster_pipeline_db_data, ) diff --git a/sentry_sdk/integrations/redis/redis_py_cluster_legacy.py b/sentry_sdk/integrations/redis/redis_py_cluster_legacy.py index ad1c23633f..53b545c21b 100644 --- a/sentry_sdk/integrations/redis/redis_py_cluster_legacy.py +++ b/sentry_sdk/integrations/redis/redis_py_cluster_legacy.py @@ -9,7 +9,7 @@ patch_redis_client, patch_redis_pipeline, ) -from sentry_sdk.integrations.redis.modules.queries import _set_db_data +from sentry_sdk.integrations.redis.modules.queries import _get_db_data from sentry_sdk.integrations.redis.utils import _parse_rediscluster_command @@ -23,7 +23,7 @@ def _patch_rediscluster(): patch_redis_client( rediscluster.RedisCluster, is_cluster=True, - set_db_data_fn=_set_db_data, + get_db_data_fn=_get_db_data, ) # up to v1.3.6, __version__ attribute is a tuple @@ -37,7 +37,7 @@ def _patch_rediscluster(): patch_redis_client( rediscluster.StrictRedisCluster, is_cluster=True, - set_db_data_fn=_set_db_data, + get_db_data_fn=_get_db_data, ) else: pipeline_cls = rediscluster.pipeline.ClusterPipeline @@ -46,5 +46,5 @@ def _patch_rediscluster(): pipeline_cls, is_cluster=True, get_command_args_fn=_parse_rediscluster_command, - set_db_data_fn=_set_db_data, + get_db_data_fn=_get_db_data, ) diff --git a/sentry_sdk/integrations/redis/utils.py b/sentry_sdk/integrations/redis/utils.py index 27fae1e8ca..58130582ce 100644 --- a/sentry_sdk/integrations/redis/utils.py +++ b/sentry_sdk/integrations/redis/utils.py @@ -1,3 +1,4 @@ +import sentry_sdk from sentry_sdk.consts import SPANDATA from sentry_sdk.integrations.redis.consts import ( _COMMANDS_INCLUDING_SENSITIVE_DATA, @@ -16,6 +17,47 @@ from sentry_sdk.tracing import Span +TAG_KEYS = [ + "redis.command", + "redis.is_cluster", + "redis.key", + "redis.transaction", + SPANDATA.DB_OPERATION, +] + + +def _update_span(span, *data_bags): + # type: (Span, *dict[str, Any]) -> None + """ + Set tags and data on the given span to data from the given data bags. + """ + for data in data_bags: + for key, value in data.items(): + if key in TAG_KEYS: + span.set_tag(key, value) + else: + span.set_attribute(key, value) + + +def _create_breadcrumb(message, *data_bags): + # type: (str, *dict[str, Any]) -> None + """ + Create a breadcrumb containing the tags data from the given data bags. 
+ """ + data = {} + for data in data_bags: + for key, value in data.items(): + if key in TAG_KEYS: + data[key] = value + + sentry_sdk.add_breadcrumb( + message=message, + type="redis", + category="redis", + data=data, + ) + + def _get_safe_command(name, args): # type: (str, Sequence[Any]) -> str command_parts = [name] @@ -105,12 +147,12 @@ def _parse_rediscluster_command(command): return command.args -def _set_pipeline_data( - span, is_cluster, get_command_args_fn, is_transaction, command_stack -): - # type: (Span, bool, Any, bool, Sequence[Any]) -> None - span.set_tag("redis.is_cluster", is_cluster) - span.set_tag("redis.transaction", is_transaction) +def _get_pipeline_data(is_cluster, get_command_args_fn, is_transaction, command_stack): + # type: (bool, Any, bool, Sequence[Any]) -> dict[str, Any] + data = { + "redis.is_cluster": is_cluster, + "redis.transaction": is_transaction, + } # type: dict[str, Any] commands = [] for i, arg in enumerate(command_stack): @@ -120,25 +162,27 @@ def _set_pipeline_data( command = get_command_args_fn(arg) commands.append(_get_safe_command(command[0], command[1:])) - span.set_data( - "redis.commands", - { - "count": len(command_stack), - "first_ten": commands, - }, - ) + data["redis.commands.count"] = len(command_stack) + data["redis.commands.first_ten"] = commands + + return data -def _set_client_data(span, is_cluster, name, *args): - # type: (Span, bool, str, *Any) -> None - span.set_tag("redis.is_cluster", is_cluster) +def _get_client_data(is_cluster, name, *args): + # type: (bool, str, *Any) -> dict[str, Any] + data = { + "redis.is_cluster": is_cluster, + } # type: dict[str, Any] + if name: - span.set_tag("redis.command", name) - span.set_tag(SPANDATA.DB_OPERATION, name) + data["redis.command"] = name + data[SPANDATA.DB_OPERATION] = name if name and args: name_low = name.lower() if (name_low in _SINGLE_KEY_COMMANDS) or ( name_low in _MULTI_KEY_COMMANDS and len(args) == 1 ): - span.set_tag("redis.key", args[0]) + data["redis.key"] = args[0] + + return data diff --git a/sentry_sdk/integrations/rq.py b/sentry_sdk/integrations/rq.py index 6d7fcf723b..33910ed476 100644 --- a/sentry_sdk/integrations/rq.py +++ b/sentry_sdk/integrations/rq.py @@ -2,7 +2,6 @@ import sentry_sdk from sentry_sdk.consts import OP -from sentry_sdk.api import continue_trace from sentry_sdk.integrations import _check_minimum_version, DidNotEnable, Integration from sentry_sdk.integrations.logging import ignore_logger from sentry_sdk.tracing import TransactionSource @@ -33,6 +32,17 @@ from rq.job import Job +DEFAULT_TRANSACTION_NAME = "unknown RQ task" + + +JOB_PROPERTY_TO_ATTRIBUTE = { + "id": "messaging.message.id", +} + +QUEUE_PROPERTY_TO_ATTRIBUTE = { + "name": "messaging.destination.name", +} + class RqIntegration(Integration): identifier = "rq" @@ -47,28 +57,31 @@ def setup_once(): old_perform_job = Worker.perform_job @ensure_integration_enabled(RqIntegration, old_perform_job) - def sentry_patched_perform_job(self, job, *args, **kwargs): - # type: (Any, Job, *Queue, **Any) -> bool + def sentry_patched_perform_job(self, job, queue, *args, **kwargs): + # type: (Any, Job, Queue, *Any, **Any) -> bool with sentry_sdk.new_scope() as scope: - scope.clear_breadcrumbs() - scope.add_event_processor(_make_event_processor(weakref.ref(job))) + try: + transaction_name = job.func_name or DEFAULT_TRANSACTION_NAME + except AttributeError: + transaction_name = DEFAULT_TRANSACTION_NAME - transaction = continue_trace( - job.meta.get("_sentry_trace_headers") or {}, - op=OP.QUEUE_TASK_RQ, - 
name="unknown RQ task", - source=TransactionSource.TASK, - origin=RqIntegration.origin, + scope.set_transaction_name( + transaction_name, source=TransactionSource.TASK ) + scope.clear_breadcrumbs() + scope.add_event_processor(_make_event_processor(weakref.ref(job))) - with capture_internal_exceptions(): - transaction.name = job.func_name - - with sentry_sdk.start_transaction( - transaction, - custom_sampling_context={"rq_job": job}, + with sentry_sdk.continue_trace( + job.meta.get("_sentry_trace_headers") or {} ): - rv = old_perform_job(self, job, *args, **kwargs) + with sentry_sdk.start_span( + op=OP.QUEUE_TASK_RQ, + name=transaction_name, + source=TransactionSource.TASK, + origin=RqIntegration.origin, + attributes=_prepopulate_attributes(job, queue), + ): + rv = old_perform_job(self, job, queue, *args, **kwargs) if self.is_horse: # We're inside of a forked process and RQ is @@ -102,11 +115,9 @@ def sentry_patched_handle_exception(self, job, *exc_info, **kwargs): @ensure_integration_enabled(RqIntegration, old_enqueue_job) def sentry_patched_enqueue_job(self, job, **kwargs): # type: (Queue, Any, **Any) -> Any - scope = sentry_sdk.get_current_scope() - if scope.span is not None: - job.meta["_sentry_trace_headers"] = dict( - scope.iter_trace_propagation_headers() - ) + job.meta["_sentry_trace_headers"] = dict( + sentry_sdk.get_current_scope().iter_trace_propagation_headers() + ) return old_enqueue_job(self, job, **kwargs) @@ -159,3 +170,37 @@ def _capture_exception(exc_info, **kwargs): ) sentry_sdk.capture_event(event, hint=hint) + + +def _prepopulate_attributes(job, queue): + # type: (Job, Queue) -> dict[str, Any] + attributes = { + "messaging.system": "rq", + "rq.job.id": job.id, + } + + for prop, attr in JOB_PROPERTY_TO_ATTRIBUTE.items(): + if getattr(job, prop, None) is not None: + attributes[attr] = getattr(job, prop) + + for prop, attr in QUEUE_PROPERTY_TO_ATTRIBUTE.items(): + if getattr(queue, prop, None) is not None: + attributes[attr] = getattr(queue, prop) + + if getattr(job, "args", None): + for i, arg in enumerate(job.args): + with capture_internal_exceptions(): + attributes[f"rq.job.args.{i}"] = str(arg) + + if getattr(job, "kwargs", None): + for kwarg, value in job.kwargs.items(): + with capture_internal_exceptions(): + attributes[f"rq.job.kwargs.{kwarg}"] = str(value) + + func = job.func + if callable(func): + func = func.__name__ + + attributes["rq.job.func"] = str(func) + + return attributes diff --git a/sentry_sdk/integrations/rust_tracing.py b/sentry_sdk/integrations/rust_tracing.py index e4c211814f..acfe9bd7f4 100644 --- a/sentry_sdk/integrations/rust_tracing.py +++ b/sentry_sdk/integrations/rust_tracing.py @@ -32,16 +32,14 @@ import json from enum import Enum, auto -from typing import Any, Callable, Dict, Tuple, Optional +from typing import Any, Callable, Dict, Optional import sentry_sdk from sentry_sdk.integrations import Integration from sentry_sdk.scope import should_send_default_pii -from sentry_sdk.tracing import Span as SentrySpan +from sentry_sdk.tracing import Span from sentry_sdk.utils import SENSITIVE_DATA_SUBSTITUTE -TraceState = Optional[Tuple[Optional[SentrySpan], SentrySpan]] - class RustTracingLevel(Enum): Trace = "TRACE" @@ -171,7 +169,7 @@ def _include_tracing_fields(self) -> bool: else self.include_tracing_fields ) - def on_event(self, event: str, _span_state: TraceState) -> None: + def on_event(self, event: str, _span_state: Optional[Span]) -> None: deserialized_event = json.loads(event) metadata = deserialized_event.get("metadata", {}) @@ -185,7 
+183,7 @@ def on_event(self, event: str, _span_state: TraceState) -> None: elif event_type == EventTypeMapping.Event: process_event(deserialized_event) - def on_new_span(self, attrs: str, span_id: str) -> TraceState: + def on_new_span(self, attrs: str, span_id: str) -> Optional[Span]: attrs = json.loads(attrs) metadata = attrs.get("metadata", {}) @@ -205,48 +203,35 @@ def on_new_span(self, attrs: str, span_id: str) -> TraceState: else: sentry_span_name = "" - kwargs = { - "op": "function", - "name": sentry_span_name, - "origin": self.origin, - } - - scope = sentry_sdk.get_current_scope() - parent_sentry_span = scope.span - if parent_sentry_span: - sentry_span = parent_sentry_span.start_child(**kwargs) - else: - sentry_span = scope.start_span(**kwargs) + span = sentry_sdk.start_span( + op="function", + name=sentry_span_name, + origin=self.origin, + only_if_parent=True, + ) + span.__enter__() fields = metadata.get("fields", []) for field in fields: if self._include_tracing_fields(): - sentry_span.set_data(field, attrs.get(field)) - else: - sentry_span.set_data(field, SENSITIVE_DATA_SUBSTITUTE) - - scope.span = sentry_span - return (parent_sentry_span, sentry_span) - - def on_close(self, span_id: str, span_state: TraceState) -> None: - if span_state is None: - return - - parent_sentry_span, sentry_span = span_state - sentry_span.finish() - sentry_sdk.get_current_scope().span = parent_sentry_span - - def on_record(self, span_id: str, values: str, span_state: TraceState) -> None: - if span_state is None: - return - _parent_sentry_span, sentry_span = span_state - - deserialized_values = json.loads(values) - for key, value in deserialized_values.items(): - if self._include_tracing_fields(): - sentry_span.set_data(key, value) + span.set_attribute(field, attrs.get(field)) else: - sentry_span.set_data(key, SENSITIVE_DATA_SUBSTITUTE) + span.set_attribute(field, SENSITIVE_DATA_SUBSTITUTE) + + return span + + def on_close(self, span_id: str, span: Optional[Span]) -> None: + if span is not None: + span.__exit__(None, None, None) + + def on_record(self, span_id: str, values: str, span: Optional[Span]) -> None: + if span is not None: + deserialized_values = json.loads(values) + for key, value in deserialized_values.items(): + if self._include_tracing_fields(): + span.set_attribute(key, value) + else: + span.set_attribute(key, SENSITIVE_DATA_SUBSTITUTE) class RustTracingIntegration(Integration): diff --git a/sentry_sdk/integrations/sanic.py b/sentry_sdk/integrations/sanic.py index bd8f1f329b..06e30ffe31 100644 --- a/sentry_sdk/integrations/sanic.py +++ b/sentry_sdk/integrations/sanic.py @@ -4,7 +4,6 @@ from urllib.parse import urlsplit import sentry_sdk -from sentry_sdk import continue_trace from sentry_sdk.consts import OP from sentry_sdk.integrations import _check_minimum_version, Integration, DidNotEnable from sentry_sdk.integrations._wsgi_common import RequestExtractor, _filter_headers @@ -182,21 +181,25 @@ async def _context_enter(request): return weak_request = weakref.ref(request) - request.ctx._sentry_scope = sentry_sdk.isolation_scope() - scope = request.ctx._sentry_scope.__enter__() + request.ctx._sentry_scope_manager = sentry_sdk.isolation_scope() + scope = request.ctx._sentry_scope_manager.__enter__() + request.ctx._sentry_scope = scope + + scope.set_transaction_name(request.path, TransactionSource.URL) scope.clear_breadcrumbs() scope.add_event_processor(_make_request_processor(weak_request)) - transaction = continue_trace( - dict(request.headers), + # TODO-neel-potel test if this works + 
request.ctx._sentry_continue_trace = sentry_sdk.continue_trace( + dict(request.headers) + ) + request.ctx._sentry_continue_trace.__enter__() + request.ctx._sentry_transaction = sentry_sdk.start_span( op=OP.HTTP_SERVER, # Unless the request results in a 404 error, the name and source will get overwritten in _set_transaction name=request.path, source=TransactionSource.URL, origin=SanicIntegration.origin, - ) - request.ctx._sentry_transaction = sentry_sdk.start_transaction( - transaction ).__enter__() @@ -211,16 +214,23 @@ async def _context_exit(request, response=None): response_status = None if response is None else response.status # This capture_internal_exceptions block has been intentionally nested here, so that in case an exception - # happens while trying to end the transaction, we still attempt to exit the hub. + # happens while trying to end the transaction, we still attempt to exit the scope. with capture_internal_exceptions(): request.ctx._sentry_transaction.set_http_status(response_status) - request.ctx._sentry_transaction.sampled &= ( + + if ( isinstance(integration, SanicIntegration) - and response_status not in integration._unsampled_statuses - ) + and response_status in integration._unsampled_statuses + ): + # drop the event in an event processor + request.ctx._sentry_scope.add_event_processor( + lambda _event, _hint: None + ) + request.ctx._sentry_transaction.__exit__(None, None, None) + request.ctx._sentry_continue_trace.__exit__(None, None, None) - request.ctx._sentry_scope.__exit__(None, None, None) + request.ctx._sentry_scope_manager.__exit__(None, None, None) async def _set_transaction(request, route, **_): diff --git a/sentry_sdk/integrations/socket.py b/sentry_sdk/integrations/socket.py index babf61aa7a..544a63c0f0 100644 --- a/sentry_sdk/integrations/socket.py +++ b/sentry_sdk/integrations/socket.py @@ -61,10 +61,13 @@ def create_connection( op=OP.SOCKET_CONNECTION, name=_get_span_description(address[0], address[1]), origin=SocketIntegration.origin, + only_if_parent=True, ) as span: - span.set_data("address", address) - span.set_data("timeout", timeout) - span.set_data("source_address", source_address) + host, port = address + span.set_attribute("address.host", host) + span.set_attribute("address.port", port) + span.set_attribute("timeout", timeout) + span.set_attribute("source_address", source_address) return real_create_connection( address=address, timeout=timeout, source_address=source_address @@ -87,9 +90,10 @@ def getaddrinfo(host, port, family=0, type=0, proto=0, flags=0): op=OP.SOCKET_DNS, name=_get_span_description(host, port), origin=SocketIntegration.origin, + only_if_parent=True, ) as span: - span.set_data("host", host) - span.set_data("port", port) + span.set_attribute("host", host) + span.set_attribute("port", port) return real_getaddrinfo(host, port, family, type, proto, flags) diff --git a/sentry_sdk/integrations/sqlalchemy.py b/sentry_sdk/integrations/sqlalchemy.py index 068d373053..4c4d8fde8c 100644 --- a/sentry_sdk/integrations/sqlalchemy.py +++ b/sentry_sdk/integrations/sqlalchemy.py @@ -68,15 +68,15 @@ def _after_cursor_execute(conn, cursor, statement, parameters, context, *args): context, "_sentry_sql_span_manager", None ) # type: Optional[ContextManager[Any]] - if ctx_mgr is not None: - context._sentry_sql_span_manager = None - ctx_mgr.__exit__(None, None, None) - span = getattr(context, "_sentry_sql_span", None) # type: Optional[Span] if span is not None: with capture_internal_exceptions(): add_query_source(span) + if ctx_mgr is not None: + 
context._sentry_sql_span_manager = None + ctx_mgr.__exit__(None, None, None) + def _handle_error(context, *args): # type: (Any, *Any) -> None @@ -128,19 +128,19 @@ def _set_db_data(span, conn): # type: (Span, Any) -> None db_system = _get_db_system(conn.engine.name) if db_system is not None: - span.set_data(SPANDATA.DB_SYSTEM, db_system) + span.set_attribute(SPANDATA.DB_SYSTEM, db_system) if conn.engine.url is None: return db_name = conn.engine.url.database if db_name is not None: - span.set_data(SPANDATA.DB_NAME, db_name) + span.set_attribute(SPANDATA.DB_NAME, db_name) server_address = conn.engine.url.host if server_address is not None: - span.set_data(SPANDATA.SERVER_ADDRESS, server_address) + span.set_attribute(SPANDATA.SERVER_ADDRESS, server_address) server_port = conn.engine.url.port if server_port is not None: - span.set_data(SPANDATA.SERVER_PORT, server_port) + span.set_attribute(SPANDATA.SERVER_PORT, server_port) diff --git a/sentry_sdk/integrations/starlette.py b/sentry_sdk/integrations/starlette.py index d0f0bf2045..e6016a3624 100644 --- a/sentry_sdk/integrations/starlette.py +++ b/sentry_sdk/integrations/starlette.py @@ -1,12 +1,11 @@ import asyncio import functools -import warnings from collections.abc import Set from copy import deepcopy from json import JSONDecodeError import sentry_sdk -from sentry_sdk.consts import OP +from sentry_sdk.consts import OP, SOURCE_FOR_STYLE, TransactionSource from sentry_sdk.integrations import ( DidNotEnable, Integration, @@ -14,16 +13,11 @@ ) from sentry_sdk.integrations._wsgi_common import ( DEFAULT_HTTP_METHODS_TO_CAPTURE, - HttpCodeRangeContainer, _is_json_content_type, request_body_within_bounds, ) from sentry_sdk.integrations.asgi import SentryAsgiMiddleware from sentry_sdk.scope import should_send_default_pii -from sentry_sdk.tracing import ( - SOURCE_FOR_STYLE, - TransactionSource, -) from sentry_sdk.utils import ( AnnotatedValue, capture_internal_exceptions, @@ -37,9 +31,9 @@ from typing import TYPE_CHECKING if TYPE_CHECKING: - from typing import Any, Awaitable, Callable, Container, Dict, Optional, Tuple, Union + from typing import Any, Awaitable, Callable, Dict, Optional, Tuple - from sentry_sdk._types import Event, HttpStatusCodeRange + from sentry_sdk._types import Event try: import starlette # type: ignore @@ -89,7 +83,7 @@ class StarletteIntegration(Integration): def __init__( self, transaction_style="url", # type: str - failed_request_status_codes=_DEFAULT_FAILED_REQUEST_STATUS_CODES, # type: Union[Set[int], list[HttpStatusCodeRange], None] + failed_request_status_codes=_DEFAULT_FAILED_REQUEST_STATUS_CODES, # type: Set[int] middleware_spans=True, # type: bool http_methods_to_capture=DEFAULT_HTTP_METHODS_TO_CAPTURE, # type: tuple[str, ...] ): @@ -103,24 +97,7 @@ def __init__( self.middleware_spans = middleware_spans self.http_methods_to_capture = tuple(map(str.upper, http_methods_to_capture)) - if isinstance(failed_request_status_codes, Set): - self.failed_request_status_codes = ( - failed_request_status_codes - ) # type: Container[int] - else: - warnings.warn( - "Passing a list or None for failed_request_status_codes is deprecated. 
" - "Please pass a set of int instead.", - DeprecationWarning, - stacklevel=2, - ) - - if failed_request_status_codes is None: - self.failed_request_status_codes = _DEFAULT_FAILED_REQUEST_STATUS_CODES - else: - self.failed_request_status_codes = HttpCodeRangeContainer( - failed_request_status_codes - ) + self.failed_request_status_codes = failed_request_status_codes @staticmethod def setup_once(): @@ -164,6 +141,7 @@ async def _create_span_call(app, scope, receive, send, **kwargs): op=OP.MIDDLEWARE_STARLETTE, name=middleware_name, origin=StarletteIntegration.origin, + only_if_parent=True, ) as middleware_span: middleware_span.set_tag("starlette.middleware_name", middleware_name) @@ -174,6 +152,7 @@ async def _sentry_receive(*args, **kwargs): op=OP.MIDDLEWARE_STARLETTE_RECEIVE, name=getattr(receive, "__qualname__", str(receive)), origin=StarletteIntegration.origin, + only_if_parent=True, ) as span: span.set_tag("starlette.middleware_name", middleware_name) return await receive(*args, **kwargs) @@ -189,6 +168,7 @@ async def _sentry_send(*args, **kwargs): op=OP.MIDDLEWARE_STARLETTE_SEND, name=getattr(send, "__qualname__", str(send)), origin=StarletteIntegration.origin, + only_if_parent=True, ) as span: span.set_tag("starlette.middleware_name", middleware_name) return await send(*args, **kwargs) @@ -329,7 +309,7 @@ def _add_user_to_sentry_scope(scope): user_info.setdefault("email", starlette_user.email) sentry_scope = sentry_sdk.get_isolation_scope() - sentry_scope.user = user_info + sentry_scope.set_user(user_info) def patch_authentication_middleware(middleware_class): @@ -493,8 +473,8 @@ def _sentry_sync_func(*args, **kwargs): return old_func(*args, **kwargs) current_scope = sentry_sdk.get_current_scope() - if current_scope.transaction is not None: - current_scope.transaction.update_active_thread() + if current_scope.root_span is not None: + current_scope.root_span.update_active_thread() sentry_scope = sentry_sdk.get_isolation_scope() if sentry_scope.profile is not None: diff --git a/sentry_sdk/integrations/starlite.py b/sentry_sdk/integrations/starlite.py index 24707a18b1..928c697373 100644 --- a/sentry_sdk/integrations/starlite.py +++ b/sentry_sdk/integrations/starlite.py @@ -1,9 +1,8 @@ import sentry_sdk -from sentry_sdk.consts import OP +from sentry_sdk.consts import OP, SOURCE_FOR_STYLE, TransactionSource from sentry_sdk.integrations import DidNotEnable, Integration from sentry_sdk.integrations.asgi import SentryAsgiMiddleware from sentry_sdk.scope import should_send_default_pii -from sentry_sdk.tracing import SOURCE_FOR_STYLE, TransactionSource from sentry_sdk.utils import ( ensure_integration_enabled, event_from_exception, @@ -140,6 +139,7 @@ async def _create_span_call(self, scope, receive, send): op=OP.MIDDLEWARE_STARLITE, name=middleware_name, origin=StarliteIntegration.origin, + only_if_parent=True, ) as middleware_span: middleware_span.set_tag("starlite.middleware_name", middleware_name) @@ -152,6 +152,7 @@ async def _sentry_receive(*args, **kwargs): op=OP.MIDDLEWARE_STARLITE_RECEIVE, name=getattr(receive, "__qualname__", str(receive)), origin=StarliteIntegration.origin, + only_if_parent=True, ) as span: span.set_tag("starlite.middleware_name", middleware_name) return await receive(*args, **kwargs) @@ -169,6 +170,7 @@ async def _sentry_send(message): op=OP.MIDDLEWARE_STARLITE_SEND, name=getattr(send, "__qualname__", str(send)), origin=StarliteIntegration.origin, + only_if_parent=True, ) as span: span.set_tag("starlite.middleware_name", middleware_name) return await send(message) 
diff --git a/sentry_sdk/integrations/stdlib.py b/sentry_sdk/integrations/stdlib.py index d388c5bca6..adc0de4f28 100644 --- a/sentry_sdk/integrations/stdlib.py +++ b/sentry_sdk/integrations/stdlib.py @@ -13,6 +13,8 @@ SENSITIVE_DATA_SUBSTITUTE, capture_internal_exceptions, ensure_integration_enabled, + get_current_thread_meta, + http_client_status_to_breadcrumb_level, is_sentry_url, logger, safe_repr, @@ -71,7 +73,7 @@ def putrequest(self, method, url, *args, **kwargs): client = sentry_sdk.get_client() if client.get_integration(StdlibIntegration) is None or is_sentry_url( - client, host + client, f"{host}:{port}" # noqa: E231 ): return real_putrequest(self, method, url, *args, **kwargs) @@ -93,12 +95,20 @@ def putrequest(self, method, url, *args, **kwargs): name="%s %s" % (method, parsed_url.url if parsed_url else SENSITIVE_DATA_SUBSTITUTE), origin="auto.http.stdlib.httplib", + only_if_parent=True, ) - span.set_data(SPANDATA.HTTP_METHOD, method) + span.__enter__() + + data = { + SPANDATA.HTTP_METHOD: method, + } if parsed_url is not None: - span.set_data("url", parsed_url.url) - span.set_data(SPANDATA.HTTP_QUERY, parsed_url.query) - span.set_data(SPANDATA.HTTP_FRAGMENT, parsed_url.fragment) + data["url"] = parsed_url.url + data[SPANDATA.HTTP_QUERY] = parsed_url.query + data[SPANDATA.HTTP_FRAGMENT] = parsed_url.fragment + + for key, value in data.items(): + span.set_attribute(key, value) rv = real_putrequest(self, method, url, *args, **kwargs) @@ -117,6 +127,7 @@ def putrequest(self, method, url, *args, **kwargs): self.putheader(key, value) self._sentrysdk_span = span # type: ignore[attr-defined] + self._sentrysdk_span_data = data # type: ignore[attr-defined] return rv @@ -130,10 +141,22 @@ def getresponse(self, *args, **kwargs): try: rv = real_getresponse(self, *args, **kwargs) - span.set_http_status(int(rv.status)) - span.set_data("reason", rv.reason) + span_data = getattr(self, "_sentrysdk_span_data", {}) + span_data[SPANDATA.HTTP_STATUS_CODE] = int(rv.status) + span_data["reason"] = rv.reason + + status_code = int(rv.status) + span.set_http_status(status_code) + span.set_attribute("reason", rv.reason) + + sentry_sdk.add_breadcrumb( + type="http", + category="httplib", + data=span_data, + level=http_client_status_to_breadcrumb_level(status_code), + ) finally: - span.finish() + span.__exit__(None, None, None) return rv @@ -207,6 +230,7 @@ def sentry_patched_popen_init(self, *a, **kw): op=OP.SUBPROCESS, name=description, origin="auto.subprocess.stdlib.subprocess", + only_if_parent=True, ) as span: for k, v in sentry_sdk.get_current_scope().iter_trace_propagation_headers( span=span @@ -222,11 +246,29 @@ def sentry_patched_popen_init(self, *a, **kw): env["SUBPROCESS_" + k.upper().replace("-", "_")] = v if cwd: - span.set_data("subprocess.cwd", cwd) + span.set_attribute("subprocess.cwd", cwd) rv = old_popen_init(self, *a, **kw) span.set_tag("subprocess.pid", self.pid) + + with capture_internal_exceptions(): + thread_id, thread_name = get_current_thread_meta() + breadcrumb_data = { + "subprocess.pid": self.pid, + "thread.id": thread_id, + "thread.name": thread_name, + } + if cwd: + breadcrumb_data["subprocess.cwd"] = cwd + + sentry_sdk.add_breadcrumb( + type="subprocess", + category="subprocess", + message=description, + data=breadcrumb_data, + ) + return rv subprocess.Popen.__init__ = sentry_patched_popen_init # type: ignore @@ -239,6 +281,7 @@ def sentry_patched_popen_wait(self, *a, **kw): with sentry_sdk.start_span( op=OP.SUBPROCESS_WAIT, origin="auto.subprocess.stdlib.subprocess", + 
only_if_parent=True, ) as span: span.set_tag("subprocess.pid", self.pid) return old_popen_wait(self, *a, **kw) @@ -253,6 +296,7 @@ def sentry_patched_popen_communicate(self, *a, **kw): with sentry_sdk.start_span( op=OP.SUBPROCESS_COMMUNICATE, origin="auto.subprocess.stdlib.subprocess", + only_if_parent=True, ) as span: span.set_tag("subprocess.pid", self.pid) return old_popen_communicate(self, *a, **kw) diff --git a/sentry_sdk/integrations/strawberry.py b/sentry_sdk/integrations/strawberry.py index ae7d273079..274ae8d1c9 100644 --- a/sentry_sdk/integrations/strawberry.py +++ b/sentry_sdk/integrations/strawberry.py @@ -107,14 +107,6 @@ def _sentry_patched_schema_init(self, *args, **kwargs): "False" if should_use_async_extension else "True", ) - # remove the built in strawberry sentry extension, if present - extensions = [ - extension - for extension in extensions - if extension - not in (StrawberrySentryAsyncExtension, StrawberrySentrySyncExtension) - ] - # add our extension extensions.append( SentryAsyncExtension if should_use_async_extension else SentrySyncExtension @@ -184,58 +176,52 @@ def on_operation(self): event_processor = _make_request_event_processor(self.execution_context) scope.add_event_processor(event_processor) - span = sentry_sdk.get_current_span() - if span: - self.graphql_span = span.start_child( - op=op, - name=description, - origin=StrawberryIntegration.origin, - ) - else: - self.graphql_span = sentry_sdk.start_span( - op=op, - name=description, - origin=StrawberryIntegration.origin, - ) + with sentry_sdk.start_span( + op=op, + name=description, + origin=StrawberryIntegration.origin, + only_if_parent=True, + ) as graphql_span: + graphql_span.set_attribute("graphql.operation.type", operation_type) + graphql_span.set_attribute("graphql.document", self.execution_context.query) + graphql_span.set_attribute("graphql.resource_name", self._resource_name) + + yield - self.graphql_span.set_data("graphql.operation.type", operation_type) - self.graphql_span.set_data("graphql.operation.name", self._operation_name) - self.graphql_span.set_data("graphql.document", self.execution_context.query) - self.graphql_span.set_data("graphql.resource_name", self._resource_name) + # we might have a more accurate operation_name after the parsing + self._operation_name = self.execution_context.operation_name - yield + if self._operation_name is not None: + graphql_span.set_attribute( + "graphql.operation.name", self._operation_name + ) - transaction = self.graphql_span.containing_transaction - if transaction and self.execution_context.operation_name: - transaction.name = self.execution_context.operation_name - transaction.source = TransactionSource.COMPONENT - transaction.op = op + sentry_sdk.get_current_scope().set_transaction_name( + self._operation_name, + source=TransactionSource.COMPONENT, + ) - self.graphql_span.finish() + root_span = graphql_span.root_span + if root_span: + root_span.op = op def on_validate(self): # type: () -> Generator[None, None, None] - self.validation_span = self.graphql_span.start_child( + with sentry_sdk.start_span( op=OP.GRAPHQL_VALIDATE, name="validation", origin=StrawberryIntegration.origin, - ) - - yield - - self.validation_span.finish() + ): + yield def on_parse(self): # type: () -> Generator[None, None, None] - self.parsing_span = self.graphql_span.start_child( + with sentry_sdk.start_span( op=OP.GRAPHQL_PARSE, name="parsing", origin=StrawberryIntegration.origin, - ) - - yield - - self.parsing_span.finish() + ): + yield def should_skip_tracing(self, _next, 
info): # type: (Callable[[Any, GraphQLResolveInfo, Any, Any], Any], GraphQLResolveInfo) -> bool @@ -257,15 +243,15 @@ async def resolve(self, _next, root, info, *args, **kwargs): field_path = "{}.{}".format(info.parent_type, info.field_name) - with self.graphql_span.start_child( + with sentry_sdk.start_span( op=OP.GRAPHQL_RESOLVE, name="resolving {}".format(field_path), origin=StrawberryIntegration.origin, ) as span: - span.set_data("graphql.field_name", info.field_name) - span.set_data("graphql.parent_type", info.parent_type.name) - span.set_data("graphql.field_path", field_path) - span.set_data("graphql.path", ".".join(map(str, info.path.as_list()))) + span.set_attribute("graphql.field_name", info.field_name) + span.set_attribute("graphql.parent_type", info.parent_type.name) + span.set_attribute("graphql.field_path", field_path) + span.set_attribute("graphql.path", ".".join(map(str, info.path.as_list()))) return await self._resolve(_next, root, info, *args, **kwargs) @@ -278,15 +264,15 @@ def resolve(self, _next, root, info, *args, **kwargs): field_path = "{}.{}".format(info.parent_type, info.field_name) - with self.graphql_span.start_child( + with sentry_sdk.start_span( op=OP.GRAPHQL_RESOLVE, name="resolving {}".format(field_path), origin=StrawberryIntegration.origin, ) as span: - span.set_data("graphql.field_name", info.field_name) - span.set_data("graphql.parent_type", info.parent_type.name) - span.set_data("graphql.field_path", field_path) - span.set_data("graphql.path", ".".join(map(str, info.path.as_list()))) + span.set_attribute("graphql.field_name", info.field_name) + span.set_attribute("graphql.parent_type", info.parent_type.name) + span.set_attribute("graphql.field_path", field_path) + span.set_attribute("graphql.path", ".".join(map(str, info.path.as_list()))) return _next(root, info, *args, **kwargs) @@ -383,11 +369,6 @@ def inner(event, hint): def _guess_if_using_async(extensions): # type: (List[SchemaExtension]) -> bool - if StrawberrySentryAsyncExtension in extensions: - return True - elif StrawberrySentrySyncExtension in extensions: - return False - return bool( {"starlette", "starlite", "litestar", "fastapi"} & set(_get_installed_modules()) ) diff --git a/sentry_sdk/integrations/threading.py b/sentry_sdk/integrations/threading.py index 9c99a8e896..8d0bb69f9d 100644 --- a/sentry_sdk/integrations/threading.py +++ b/sentry_sdk/integrations/threading.py @@ -4,12 +4,12 @@ from threading import Thread, current_thread import sentry_sdk +from sentry_sdk import Scope +from sentry_sdk.scope import ScopeType from sentry_sdk.integrations import Integration -from sentry_sdk.scope import use_isolation_scope, use_scope from sentry_sdk.utils import ( event_from_exception, capture_internal_exceptions, - logger, reraise, ) @@ -19,7 +19,6 @@ from typing import Any from typing import TypeVar from typing import Callable - from typing import Optional from sentry_sdk._types import ExcInfo @@ -29,22 +28,10 @@ class ThreadingIntegration(Integration): identifier = "threading" - def __init__(self, propagate_hub=None, propagate_scope=True): - # type: (Optional[bool], bool) -> None - if propagate_hub is not None: - logger.warning( - "Deprecated: propagate_hub is deprecated. This will be removed in the future." 
- ) - - # Note: propagate_hub did not have any effect on propagation of scope data - # scope data was always propagated no matter what the value of propagate_hub was - # This is why the default for propagate_scope is True - + def __init__(self, propagate_scope=True): + # type: (bool) -> None self.propagate_scope = propagate_scope - if propagate_hub is not None: - self.propagate_scope = propagate_hub - @staticmethod def setup_once(): # type: () -> None @@ -89,8 +76,8 @@ def sentry_start(self, *a, **kw): isolation_scope = sentry_sdk.get_isolation_scope().fork() current_scope = sentry_sdk.get_current_scope().fork() else: - isolation_scope = None - current_scope = None + isolation_scope = Scope(ty=ScopeType.ISOLATION) + current_scope = Scope(ty=ScopeType.CURRENT) # Patching instance methods in `start()` creates a reference cycle if # done in a naive way. See @@ -112,7 +99,7 @@ def sentry_start(self, *a, **kw): def _wrap_run(isolation_scope_to_use, current_scope_to_use, old_run_func): - # type: (Optional[sentry_sdk.Scope], Optional[sentry_sdk.Scope], F) -> F + # type: (sentry_sdk.Scope, sentry_sdk.Scope, F) -> F @wraps(old_run_func) def run(*a, **kw): # type: (*Any, **Any) -> Any @@ -124,12 +111,9 @@ def _run_old_run_func(): except Exception: reraise(*_capture_exception()) - if isolation_scope_to_use is not None and current_scope_to_use is not None: - with use_isolation_scope(isolation_scope_to_use): - with use_scope(current_scope_to_use): - return _run_old_run_func() - else: - return _run_old_run_func() + with sentry_sdk.use_isolation_scope(isolation_scope_to_use): + with sentry_sdk.use_scope(current_scope_to_use): + return _run_old_run_func() return run # type: ignore diff --git a/sentry_sdk/integrations/tornado.py b/sentry_sdk/integrations/tornado.py index 3cd087524a..70fb21ee14 100644 --- a/sentry_sdk/integrations/tornado.py +++ b/sentry_sdk/integrations/tornado.py @@ -3,7 +3,6 @@ from inspect import iscoroutinefunction import sentry_sdk -from sentry_sdk.api import continue_trace from sentry_sdk.consts import OP from sentry_sdk.scope import should_send_default_pii from sentry_sdk.tracing import TransactionSource @@ -20,13 +19,15 @@ RequestExtractor, _filter_headers, _is_json_content_type, + _request_headers_to_span_attributes, ) from sentry_sdk.integrations.logging import ignore_logger try: from tornado import version_info as TORNADO_VERSION - from tornado.web import RequestHandler, HTTPError from tornado.gen import coroutine + from tornado.httputil import HTTPServerRequest + from tornado.web import RequestHandler, HTTPError except ImportError: raise DidNotEnable("Tornado not installed") @@ -42,6 +43,14 @@ from sentry_sdk._types import Event, EventProcessor +REQUEST_PROPERTY_TO_ATTRIBUTE = { + "method": "http.request.method", + "path": "url.path", + "query": "url.query", + "protocol": "url.scheme", +} + + class TornadoIntegration(Integration): identifier = "tornado" origin = f"auto.http.{identifier}" @@ -111,22 +120,19 @@ def _handle_request_impl(self): processor = _make_event_processor(weak_handler) scope.add_event_processor(processor) - transaction = continue_trace( - headers, - op=OP.HTTP_SERVER, - # Like with all other integrations, this is our - # fallback transaction in case there is no route. - # sentry_urldispatcher_resolve is responsible for - # setting a transaction name later. 
- name="generic Tornado request", - source=TransactionSource.ROUTE, - origin=TornadoIntegration.origin, - ) - - with sentry_sdk.start_transaction( - transaction, custom_sampling_context={"tornado_request": self.request} - ): - yield + with sentry_sdk.continue_trace(headers): + with sentry_sdk.start_span( + op=OP.HTTP_SERVER, + # Like with all other integrations, this is our + # fallback transaction in case there is no route. + # sentry_urldispatcher_resolve is responsible for + # setting a transaction name later. + name="generic Tornado request", + source=TransactionSource.ROUTE, + origin=TornadoIntegration.origin, + attributes=_prepopulate_attributes(self.request), + ): + yield @ensure_integration_enabled(TornadoIntegration) @@ -218,3 +224,36 @@ def files(self): def size_of_file(self, file): # type: (Any) -> int return len(file.body or ()) + + +def _prepopulate_attributes(request): + # type: (HTTPServerRequest) -> dict[str, Any] + # https://www.tornadoweb.org/en/stable/httputil.html#tornado.httputil.HTTPServerRequest + attributes = {} + + for prop, attr in REQUEST_PROPERTY_TO_ATTRIBUTE.items(): + if getattr(request, prop, None) is not None: + attributes[attr] = getattr(request, prop) + + if getattr(request, "version", None): + try: + proto, version = request.version.split("/") + attributes["network.protocol.name"] = proto + attributes["network.protocol.version"] = version + except ValueError: + attributes["network.protocol.name"] = request.version + + if getattr(request, "host", None): + try: + address, port = request.host.split(":") + attributes["server.address"] = address + attributes["server.port"] = port + except ValueError: + attributes["server.address"] = request.host + + with capture_internal_exceptions(): + attributes["url.full"] = request.full_url() + + attributes.update(_request_headers_to_span_attributes(request.headers)) + + return attributes diff --git a/sentry_sdk/integrations/trytond.py b/sentry_sdk/integrations/trytond.py index 2c44c593a4..fd2c6f389f 100644 --- a/sentry_sdk/integrations/trytond.py +++ b/sentry_sdk/integrations/trytond.py @@ -1,8 +1,9 @@ import sentry_sdk -from sentry_sdk.integrations import Integration +from sentry_sdk.integrations import _check_minimum_version, Integration from sentry_sdk.integrations.wsgi import SentryWsgiMiddleware from sentry_sdk.utils import ensure_integration_enabled, event_from_exception +from trytond import __version__ as trytond_version # type: ignore from trytond.exceptions import TrytonException # type: ignore from trytond.wsgi import app # type: ignore @@ -19,6 +20,8 @@ def __init__(self): # type: () -> None @staticmethod def setup_once(): # type: () -> None + _check_minimum_version(TrytondWSGIIntegration, trytond_version) + app.wsgi_app = SentryWsgiMiddleware( app.wsgi_app, span_origin=TrytondWSGIIntegration.origin, diff --git a/sentry_sdk/integrations/wsgi.py b/sentry_sdk/integrations/wsgi.py index e628e50e69..88708d6080 100644 --- a/sentry_sdk/integrations/wsgi.py +++ b/sentry_sdk/integrations/wsgi.py @@ -3,17 +3,15 @@ import sentry_sdk from sentry_sdk._werkzeug import get_host, _get_headers -from sentry_sdk.api import continue_trace from sentry_sdk.consts import OP from sentry_sdk.scope import should_send_default_pii from sentry_sdk.integrations._wsgi_common import ( DEFAULT_HTTP_METHODS_TO_CAPTURE, _filter_headers, - nullcontext, + _request_headers_to_span_attributes, ) from sentry_sdk.sessions import track_session -from sentry_sdk.scope import use_isolation_scope -from sentry_sdk.tracing import Transaction, 
TransactionSource +from sentry_sdk.tracing import Span, TransactionSource from sentry_sdk.utils import ( ContextVar, capture_internal_exceptions, @@ -48,6 +46,17 @@ def __call__(self, status, response_headers, exc_info=None): # type: ignore _wsgi_middleware_applied = ContextVar("sentry_wsgi_middleware_applied") +DEFAULT_TRANSACTION_NAME = "generic WSGI request" + +ENVIRON_TO_ATTRIBUTE = { + "PATH_INFO": "url.path", + "QUERY_STRING": "url.query", + "REQUEST_METHOD": "http.request.method", + "SERVER_NAME": "server.address", + "SERVER_PORT": "server.port", + "wsgi.url_scheme": "url.scheme", +} + def wsgi_decoding_dance(s, charset="utf-8", errors="replace"): # type: (str, str, str) -> str @@ -81,7 +90,7 @@ def __init__( self, app, # type: Callable[[Dict[str, str], Callable[..., Any]], Any] use_x_forwarded_for=False, # type: bool - span_origin="manual", # type: str + span_origin=None, # type: Optional[str] http_methods_to_capture=DEFAULT_HTTP_METHODS_TO_CAPTURE, # type: Tuple[str, ...] ): # type: (...) -> None @@ -98,6 +107,10 @@ def __call__(self, environ, start_response): _wsgi_middleware_applied.set(True) try: with sentry_sdk.isolation_scope() as scope: + scope.set_transaction_name( + DEFAULT_TRANSACTION_NAME, source=TransactionSource.ROUTE + ) + with track_session(scope, session_mode="request"): with capture_internal_exceptions(): scope.clear_breadcrumbs() @@ -107,44 +120,48 @@ def __call__(self, environ, start_response): environ, self.use_x_forwarded_for ) ) - method = environ.get("REQUEST_METHOD", "").upper() - transaction = None - if method in self.http_methods_to_capture: - transaction = continue_trace( - environ, - op=OP.HTTP_SERVER, - name="generic WSGI request", - source=TransactionSource.ROUTE, - origin=self.span_origin, - ) - - with ( - sentry_sdk.start_transaction( - transaction, - custom_sampling_context={"wsgi_environ": environ}, - ) - if transaction is not None - else nullcontext() - ): - try: - response = self.app( - environ, - partial( - _sentry_start_response, start_response, transaction + should_trace = method in self.http_methods_to_capture + if should_trace: + with sentry_sdk.continue_trace(environ): + with sentry_sdk.start_span( + op=OP.HTTP_SERVER, + name=DEFAULT_TRANSACTION_NAME, + source=TransactionSource.ROUTE, + origin=self.span_origin, + attributes=_prepopulate_attributes( + environ, self.use_x_forwarded_for ), - ) - except BaseException: - reraise(*_capture_exception()) + ) as span: + response = self._run_original_app( + environ, start_response, span + ) + else: + response = self._run_original_app(environ, start_response, None) + finally: _wsgi_middleware_applied.set(False) return _ScopedResponse(scope, response) + def _run_original_app(self, environ, start_response, span): + # type: (dict[str, str], StartResponse, Optional[Span]) -> Any + try: + return self.app( + environ, + partial( + _sentry_start_response, + start_response, + span, + ), + ) + except BaseException: + reraise(*_capture_exception()) + def _sentry_start_response( # type: ignore old_start_response, # type: StartResponse - transaction, # type: Optional[Transaction] + span, # type: Optional[Span] status, # type: str response_headers, # type: WsgiResponseHeaders exc_info=None, # type: Optional[WsgiExcInfo] @@ -152,8 +169,8 @@ def _sentry_start_response( # type: ignore # type: (...) 
-> WsgiResponseIter with capture_internal_exceptions(): status_int = int(status.split(" ", 1)[0]) - if transaction is not None: - transaction.set_http_status(status_int) + if span is not None: + span.set_http_status(status_int) if exc_info is None: # The Django Rest Framework WSGI test client, and likely other @@ -237,7 +254,7 @@ class _ScopedResponse: __slots__ = ("_response", "_scope") def __init__(self, scope, response): - # type: (sentry_sdk.scope.Scope, Iterator[bytes]) -> None + # type: (sentry_sdk.Scope, Iterator[bytes]) -> None self._scope = scope self._response = response @@ -246,7 +263,7 @@ def __iter__(self): iterator = iter(self._response) while True: - with use_isolation_scope(self._scope): + with sentry_sdk.use_isolation_scope(self._scope): try: chunk = next(iterator) except StopIteration: @@ -258,7 +275,7 @@ def __iter__(self): def close(self): # type: () -> None - with use_isolation_scope(self._scope): + with sentry_sdk.use_isolation_scope(self._scope): try: self._response.close() # type: ignore except AttributeError: @@ -308,3 +325,32 @@ def event_processor(event, hint): return event return event_processor + + +def _prepopulate_attributes(wsgi_environ, use_x_forwarded_for=False): + # type: (dict[str, str], bool) -> dict[str, str] + """Extract span attributes from the WSGI environment.""" + attributes = {} + + for prop, attr in ENVIRON_TO_ATTRIBUTE.items(): + if wsgi_environ.get(prop) is not None: + attributes[attr] = wsgi_environ[prop] + + if wsgi_environ.get("SERVER_PROTOCOL") is not None: + try: + proto, version = wsgi_environ["SERVER_PROTOCOL"].split("/") + attributes["network.protocol.name"] = proto + attributes["network.protocol.version"] = version + except ValueError: + attributes["network.protocol.name"] = wsgi_environ["SERVER_PROTOCOL"] + + with capture_internal_exceptions(): + url = get_request_url(wsgi_environ, use_x_forwarded_for) + query = wsgi_environ.get("QUERY_STRING") + attributes["url.full"] = f"{url}?{query}" if query else url + + attributes.update( + _request_headers_to_span_attributes(dict(_get_headers(wsgi_environ))) + ) + + return attributes
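The reworked `wsgi.py` above replaces the old transaction-based flow with `continue_trace` plus `start_span` and prepopulates OTel-style span attributes from the WSGI environ. Below is a standalone sketch of that mapping, pure Python with no SDK imports; the sample environ is made up for illustration, and `url.full`/header extraction are omitted since they depend on SDK helpers.

```python
# Standalone sketch mirroring ENVIRON_TO_ATTRIBUTE and the protocol split above.
ENVIRON_TO_ATTRIBUTE = {
    "PATH_INFO": "url.path",
    "QUERY_STRING": "url.query",
    "REQUEST_METHOD": "http.request.method",
    "SERVER_NAME": "server.address",
    "SERVER_PORT": "server.port",
    "wsgi.url_scheme": "url.scheme",
}


def prepopulate_attributes(environ):
    # type: (dict) -> dict
    attributes = {}
    for prop, attr in ENVIRON_TO_ATTRIBUTE.items():
        if environ.get(prop) is not None:
            attributes[attr] = environ[prop]
    # SERVER_PROTOCOL like "HTTP/1.1" splits into protocol name and version.
    protocol = environ.get("SERVER_PROTOCOL")
    if protocol is not None:
        try:
            proto, version = protocol.split("/")
            attributes["network.protocol.name"] = proto
            attributes["network.protocol.version"] = version
        except ValueError:
            attributes["network.protocol.name"] = protocol
    return attributes


environ = {
    "REQUEST_METHOD": "GET",
    "PATH_INFO": "/api/items",
    "QUERY_STRING": "page=2",
    "SERVER_NAME": "localhost",
    "SERVER_PORT": "8000",
    "SERVER_PROTOCOL": "HTTP/1.1",
    "wsgi.url_scheme": "http",
}
print(prepopulate_attributes(environ))
# {'url.path': '/api/items', 'url.query': 'page=2',
#  'http.request.method': 'GET', 'server.address': 'localhost',
#  'server.port': '8000', 'url.scheme': 'http',
#  'network.protocol.name': 'HTTP', 'network.protocol.version': '1.1'}
```

Because these attributes are passed to `start_span` up front, the sampler can see request metadata before any sampling decision is made.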
diff --git a/sentry_sdk/metrics.py b/sentry_sdk/metrics.py deleted file mode 100644 index 4bdbc62253..0000000000 --- a/sentry_sdk/metrics.py +++ /dev/null @@ -1,965 +0,0 @@ -import io -import os -import random -import re -import sys -import threading -import time -import warnings -import zlib -from abc import ABC, abstractmethod -from contextlib import contextmanager -from datetime import datetime, timezone -from functools import wraps, partial - -import sentry_sdk -from sentry_sdk.utils import ( - ContextVar, - now, - nanosecond_time, - to_timestamp, - serialize_frame, - json_dumps, -) -from sentry_sdk.envelope import Envelope, Item -from sentry_sdk.tracing import TransactionSource - -from typing import TYPE_CHECKING - -if TYPE_CHECKING: - from typing import Any - from typing import Callable - from typing import Dict - from typing import Generator - from typing import Iterable - from typing import List - from typing import Optional - from typing import Set - from typing import Tuple - from typing import Union - - from sentry_sdk._types import BucketKey - from sentry_sdk._types import DurationUnit - from sentry_sdk._types import FlushedMetricValue - from sentry_sdk._types import MeasurementUnit - from sentry_sdk._types import MetricMetaKey - from sentry_sdk._types import MetricTagValue - from sentry_sdk._types import MetricTags - from sentry_sdk._types import MetricTagsInternal - from sentry_sdk._types import MetricType - from sentry_sdk._types import MetricValue - - -warnings.warn( - "The sentry_sdk.metrics module is deprecated and will be removed in the next major release. " - "Sentry will reject all metrics sent after October 7, 2024. " - "Learn more: https://sentry.zendesk.com/hc/en-us/articles/26369339769883-Upcoming-API-Changes-to-Metrics", - DeprecationWarning, - stacklevel=2, -) - -_in_metrics = ContextVar("in_metrics", default=False) -_set = set # set is shadowed below - -GOOD_TRANSACTION_SOURCES = frozenset( - [ - TransactionSource.ROUTE, - TransactionSource.VIEW, - TransactionSource.COMPONENT, - TransactionSource.TASK, - ] -) - -_sanitize_unit = partial(re.compile(r"[^a-zA-Z0-9_]+").sub, "") -_sanitize_metric_key = partial(re.compile(r"[^a-zA-Z0-9_\-.]+").sub, "_") -_sanitize_tag_key = partial(re.compile(r"[^a-zA-Z0-9_\-.\/]+").sub, "") - - -def _sanitize_tag_value(value): - # type: (str) -> str - table = str.maketrans( - { - "\n": "\\n", - "\r": "\\r", - "\t": "\\t", - "\\": "\\\\", - "|": "\\u{7c}", - ",": "\\u{2c}", - } - ) - return value.translate(table) - - -def get_code_location(stacklevel): - # type: (int) -> Optional[Dict[str, Any]] - try: - frm = sys._getframe(stacklevel) - except Exception: - return None - - return serialize_frame( - frm, include_local_variables=False, include_source_context=True - ) - - -@contextmanager -def recursion_protection(): - # type: () -> Generator[bool, None, None] - """Enters recursion protection and returns the old flag.""" - old_in_metrics = _in_metrics.get() - _in_metrics.set(True) - try: - yield old_in_metrics - finally: - _in_metrics.set(old_in_metrics) - - -def metrics_noop(func): - # type: (Any) -> Any - """Convenient decorator that uses `recursion_protection` to - make a function a noop. - """ - - @wraps(func) - def new_func(*args, **kwargs): - # type: (*Any, **Any) -> Any - with recursion_protection() as in_metrics: - if not in_metrics: - return func(*args, **kwargs) - - return new_func - - -class Metric(ABC): - __slots__ = () - - @abstractmethod - def __init__(self, first): - # type: (MetricValue) -> None - pass - - @property - @abstractmethod - def weight(self): - # type: () -> int - pass - - @abstractmethod - def add(self, value): - # type: (MetricValue) -> None - pass - - @abstractmethod - def serialize_value(self): - # type: () -> Iterable[FlushedMetricValue] - pass - - -class CounterMetric(Metric): - __slots__ = ("value",) - - def __init__( - self, first # type: MetricValue - ): - # type: (...) -> None - self.value = float(first) - - @property - def weight(self): - # type: (...) -> int - return 1 - - def add( - self, value # type: MetricValue - ): - # type: (...) -> None - self.value += float(value) - - def serialize_value(self): - # type: (...) -> Iterable[FlushedMetricValue] - return (self.value,) - - -class GaugeMetric(Metric): - __slots__ = ( - "last", - "min", - "max", - "sum", - "count", - ) - - def __init__( - self, first # type: MetricValue - ): - # type: (...) -> None - first = float(first) - self.last = first - self.min = first - self.max = first - self.sum = first - self.count = 1 - - @property - def weight(self): - # type: (...) -> int - # Number of elements. - return 5 - - def add( - self, value # type: MetricValue - ): - # type: (...) -> None - value = float(value) - self.last = value - self.min = min(self.min, value) - self.max = max(self.max, value) - self.sum += value - self.count += 1 - - def serialize_value(self): - # type: (...)
-> Iterable[FlushedMetricValue] - return ( - self.last, - self.min, - self.max, - self.sum, - self.count, - ) - - -class DistributionMetric(Metric): - __slots__ = ("value",) - - def __init__( - self, first # type: MetricValue - ): - # type(...) -> None - self.value = [float(first)] - - @property - def weight(self): - # type: (...) -> int - return len(self.value) - - def add( - self, value # type: MetricValue - ): - # type: (...) -> None - self.value.append(float(value)) - - def serialize_value(self): - # type: (...) -> Iterable[FlushedMetricValue] - return self.value - - -class SetMetric(Metric): - __slots__ = ("value",) - - def __init__( - self, first # type: MetricValue - ): - # type: (...) -> None - self.value = {first} - - @property - def weight(self): - # type: (...) -> int - return len(self.value) - - def add( - self, value # type: MetricValue - ): - # type: (...) -> None - self.value.add(value) - - def serialize_value(self): - # type: (...) -> Iterable[FlushedMetricValue] - def _hash(x): - # type: (MetricValue) -> int - if isinstance(x, str): - return zlib.crc32(x.encode("utf-8")) & 0xFFFFFFFF - return int(x) - - return (_hash(value) for value in self.value) - - -def _encode_metrics(flushable_buckets): - # type: (Iterable[Tuple[int, Dict[BucketKey, Metric]]]) -> bytes - out = io.BytesIO() - _write = out.write - - # Note on sanitization: we intentionally sanitize in emission (serialization) - # and not during aggregation for performance reasons. This means that the - # envelope can in fact have duplicate buckets stored. This is acceptable for - # relay side emission and should not happen commonly. - - for timestamp, buckets in flushable_buckets: - for bucket_key, metric in buckets.items(): - metric_type, metric_name, metric_unit, metric_tags = bucket_key - metric_name = _sanitize_metric_key(metric_name) - metric_unit = _sanitize_unit(metric_unit) - _write(metric_name.encode("utf-8")) - _write(b"@") - _write(metric_unit.encode("utf-8")) - - for serialized_value in metric.serialize_value(): - _write(b":") - _write(str(serialized_value).encode("utf-8")) - - _write(b"|") - _write(metric_type.encode("ascii")) - - if metric_tags: - _write(b"|#") - first = True - for tag_key, tag_value in metric_tags: - tag_key = _sanitize_tag_key(tag_key) - if not tag_key: - continue - if first: - first = False - else: - _write(b",") - _write(tag_key.encode("utf-8")) - _write(b":") - _write(_sanitize_tag_value(tag_value).encode("utf-8")) - - _write(b"|T") - _write(str(timestamp).encode("ascii")) - _write(b"\n") - - return out.getvalue() - - -def _encode_locations(timestamp, code_locations): - # type: (int, Iterable[Tuple[MetricMetaKey, Dict[str, Any]]]) -> bytes - mapping = {} # type: Dict[str, List[Any]] - - for key, loc in code_locations: - metric_type, name, unit = key - mri = "{}:{}@{}".format( - metric_type, _sanitize_metric_key(name), _sanitize_unit(unit) - ) - - loc["type"] = "location" - mapping.setdefault(mri, []).append(loc) - - return json_dumps({"timestamp": timestamp, "mapping": mapping}) - - -METRIC_TYPES = { - "c": CounterMetric, - "g": GaugeMetric, - "d": DistributionMetric, - "s": SetMetric, -} # type: dict[MetricType, type[Metric]] - -# some of these are dumb -TIMING_FUNCTIONS = { - "nanosecond": nanosecond_time, - "microsecond": lambda: nanosecond_time() / 1000.0, - "millisecond": lambda: nanosecond_time() / 1000000.0, - "second": now, - "minute": lambda: now() / 60.0, - "hour": lambda: now() / 3600.0, - "day": lambda: now() / 3600.0 / 24.0, - "week": lambda: now() / 3600.0 / 24.0 / 7.0, 
-} - - -class LocalAggregator: - __slots__ = ("_measurements",) - - def __init__(self): - # type: (...) -> None - self._measurements = ( - {} - ) # type: Dict[Tuple[str, MetricTagsInternal], Tuple[float, float, int, float]] - - def add( - self, - ty, # type: MetricType - key, # type: str - value, # type: float - unit, # type: MeasurementUnit - tags, # type: MetricTagsInternal - ): - # type: (...) -> None - export_key = "%s:%s@%s" % (ty, key, unit) - bucket_key = (export_key, tags) - - old = self._measurements.get(bucket_key) - if old is not None: - v_min, v_max, v_count, v_sum = old - v_min = min(v_min, value) - v_max = max(v_max, value) - v_count += 1 - v_sum += value - else: - v_min = v_max = v_sum = value - v_count = 1 - self._measurements[bucket_key] = (v_min, v_max, v_count, v_sum) - - def to_json(self): - # type: (...) -> Dict[str, Any] - rv = {} # type: Any - for (export_key, tags), ( - v_min, - v_max, - v_count, - v_sum, - ) in self._measurements.items(): - rv.setdefault(export_key, []).append( - { - "tags": _tags_to_dict(tags), - "min": v_min, - "max": v_max, - "count": v_count, - "sum": v_sum, - } - ) - return rv - - -class MetricsAggregator: - ROLLUP_IN_SECONDS = 10.0 - MAX_WEIGHT = 100000 - FLUSHER_SLEEP_TIME = 5.0 - - def __init__( - self, - capture_func, # type: Callable[[Envelope], None] - enable_code_locations=False, # type: bool - ): - # type: (...) -> None - self.buckets = {} # type: Dict[int, Any] - self._enable_code_locations = enable_code_locations - self._seen_locations = _set() # type: Set[Tuple[int, MetricMetaKey]] - self._pending_locations = {} # type: Dict[int, List[Tuple[MetricMetaKey, Any]]] - self._buckets_total_weight = 0 - self._capture_func = capture_func - self._running = True - self._lock = threading.Lock() - - self._flush_event = threading.Event() # type: threading.Event - self._force_flush = False - - # The aggregator shifts its flushing by up to an entire rollup window to - # avoid multiple clients trampling on end of a 10 second window as all the - # buckets are anchored to multiples of ROLLUP seconds. We randomize this - # number once per aggregator boot to achieve some level of offsetting - # across a fleet of deployed SDKs. Relay itself will also apply independent - # jittering. - self._flush_shift = random.random() * self.ROLLUP_IN_SECONDS - - self._flusher = None # type: Optional[threading.Thread] - self._flusher_pid = None # type: Optional[int] - - def _ensure_thread(self): - # type: (...) -> bool - """For forking processes we might need to restart this thread. - This ensures that our process actually has that thread running. - """ - if not self._running: - return False - - pid = os.getpid() - if self._flusher_pid == pid: - return True - - with self._lock: - # Recheck to make sure another thread didn't get here and start the - # the flusher in the meantime - if self._flusher_pid == pid: - return True - - self._flusher_pid = pid - - self._flusher = threading.Thread(target=self._flush_loop) - self._flusher.daemon = True - - try: - self._flusher.start() - except RuntimeError: - # Unfortunately at this point the interpreter is in a state that no - # longer allows us to spawn a thread and we have to bail. - self._running = False - return False - - return True - - def _flush_loop(self): - # type: (...) -> None - _in_metrics.set(True) - while self._running or self._force_flush: - if self._running: - self._flush_event.wait(self.FLUSHER_SLEEP_TIME) - self._flush() - - def _flush(self): - # type: (...) 
-> None - self._emit(self._flushable_buckets(), self._flushable_locations()) - - def _flushable_buckets(self): - # type: (...) -> (Iterable[Tuple[int, Dict[BucketKey, Metric]]]) - with self._lock: - force_flush = self._force_flush - cutoff = time.time() - self.ROLLUP_IN_SECONDS - self._flush_shift - flushable_buckets = () # type: Iterable[Tuple[int, Dict[BucketKey, Metric]]] - weight_to_remove = 0 - - if force_flush: - flushable_buckets = self.buckets.items() - self.buckets = {} - self._buckets_total_weight = 0 - self._force_flush = False - else: - flushable_buckets = [] - for buckets_timestamp, buckets in self.buckets.items(): - # If the timestamp of the bucket is newer that the rollup we want to skip it. - if buckets_timestamp <= cutoff: - flushable_buckets.append((buckets_timestamp, buckets)) - - # We will clear the elements while holding the lock, in order to avoid requesting it downstream again. - for buckets_timestamp, buckets in flushable_buckets: - for metric in buckets.values(): - weight_to_remove += metric.weight - del self.buckets[buckets_timestamp] - - self._buckets_total_weight -= weight_to_remove - - return flushable_buckets - - def _flushable_locations(self): - # type: (...) -> Dict[int, List[Tuple[MetricMetaKey, Dict[str, Any]]]] - with self._lock: - locations = self._pending_locations - self._pending_locations = {} - return locations - - @metrics_noop - def add( - self, - ty, # type: MetricType - key, # type: str - value, # type: MetricValue - unit, # type: MeasurementUnit - tags, # type: Optional[MetricTags] - timestamp=None, # type: Optional[Union[float, datetime]] - local_aggregator=None, # type: Optional[LocalAggregator] - stacklevel=0, # type: Optional[int] - ): - # type: (...) -> None - if not self._ensure_thread() or self._flusher is None: - return None - - if timestamp is None: - timestamp = time.time() - elif isinstance(timestamp, datetime): - timestamp = to_timestamp(timestamp) - - bucket_timestamp = int( - (timestamp // self.ROLLUP_IN_SECONDS) * self.ROLLUP_IN_SECONDS - ) - serialized_tags = _serialize_tags(tags) - bucket_key = ( - ty, - key, - unit, - serialized_tags, - ) - - with self._lock: - local_buckets = self.buckets.setdefault(bucket_timestamp, {}) - metric = local_buckets.get(bucket_key) - if metric is not None: - previous_weight = metric.weight - metric.add(value) - else: - metric = local_buckets[bucket_key] = METRIC_TYPES[ty](value) - previous_weight = 0 - - added = metric.weight - previous_weight - - if stacklevel is not None: - self.record_code_location(ty, key, unit, stacklevel + 2, timestamp) - - # Given the new weight we consider whether we want to force flush. - self._consider_force_flush() - - # For sets, we only record that a value has been added to the set but not which one. - # See develop docs: https://develop.sentry.dev/sdk/metrics/#sets - if local_aggregator is not None: - local_value = float(added if ty == "s" else value) - local_aggregator.add(ty, key, local_value, unit, serialized_tags) - - def record_code_location( - self, - ty, # type: MetricType - key, # type: str - unit, # type: MeasurementUnit - stacklevel, # type: int - timestamp=None, # type: Optional[float] - ): - # type: (...) 
-> None - if not self._enable_code_locations: - return - if timestamp is None: - timestamp = time.time() - meta_key = (ty, key, unit) - start_of_day = datetime.fromtimestamp(timestamp, timezone.utc).replace( - hour=0, minute=0, second=0, microsecond=0, tzinfo=None - ) - start_of_day = int(to_timestamp(start_of_day)) - - if (start_of_day, meta_key) not in self._seen_locations: - self._seen_locations.add((start_of_day, meta_key)) - loc = get_code_location(stacklevel + 3) - if loc is not None: - # Group metadata by day to make flushing more efficient. - # There needs to be one envelope item per timestamp. - self._pending_locations.setdefault(start_of_day, []).append( - (meta_key, loc) - ) - - @metrics_noop - def need_code_location( - self, - ty, # type: MetricType - key, # type: str - unit, # type: MeasurementUnit - timestamp, # type: float - ): - # type: (...) -> bool - if self._enable_code_locations: - return False - meta_key = (ty, key, unit) - start_of_day = datetime.fromtimestamp(timestamp, timezone.utc).replace( - hour=0, minute=0, second=0, microsecond=0, tzinfo=None - ) - start_of_day = int(to_timestamp(start_of_day)) - return (start_of_day, meta_key) not in self._seen_locations - - def kill(self): - # type: (...) -> None - if self._flusher is None: - return - - self._running = False - self._flush_event.set() - self._flusher = None - - @metrics_noop - def flush(self): - # type: (...) -> None - self._force_flush = True - self._flush() - - def _consider_force_flush(self): - # type: (...) -> None - # It's important to acquire a lock around this method, since it will touch shared data structures. - total_weight = len(self.buckets) + self._buckets_total_weight - if total_weight >= self.MAX_WEIGHT: - self._force_flush = True - self._flush_event.set() - - def _emit( - self, - flushable_buckets, # type: (Iterable[Tuple[int, Dict[BucketKey, Metric]]]) - code_locations, # type: Dict[int, List[Tuple[MetricMetaKey, Dict[str, Any]]]] - ): - # type: (...) -> Optional[Envelope] - envelope = Envelope() - - if flushable_buckets: - encoded_metrics = _encode_metrics(flushable_buckets) - envelope.add_item(Item(payload=encoded_metrics, type="statsd")) - - for timestamp, locations in code_locations.items(): - encoded_locations = _encode_locations(timestamp, locations) - envelope.add_item(Item(payload=encoded_locations, type="metric_meta")) - - if envelope.items: - self._capture_func(envelope) - return envelope - return None - - -def _serialize_tags( - tags, # type: Optional[MetricTags] -): - # type: (...) -> MetricTagsInternal - if not tags: - return () - - rv = [] - for key, value in tags.items(): - # If the value is a collection, we want to flatten it. - if isinstance(value, (list, tuple)): - for inner_value in value: - if inner_value is not None: - rv.append((key, str(inner_value))) - elif value is not None: - rv.append((key, str(value))) - - # It's very important to sort the tags in order to obtain the - # same bucket key. 
- return tuple(sorted(rv)) - - -def _tags_to_dict(tags): - # type: (MetricTagsInternal) -> Dict[str, Any] - rv = {} # type: Dict[str, Any] - for tag_name, tag_value in tags: - old_value = rv.get(tag_name) - if old_value is not None: - if isinstance(old_value, list): - old_value.append(tag_value) - else: - rv[tag_name] = [old_value, tag_value] - else: - rv[tag_name] = tag_value - return rv - - -def _get_aggregator(): - # type: () -> Optional[MetricsAggregator] - client = sentry_sdk.get_client() - return ( - client.metrics_aggregator - if client.is_active() and client.metrics_aggregator is not None - else None - ) - - -def _get_aggregator_and_update_tags(key, value, unit, tags): - # type: (str, Optional[MetricValue], MeasurementUnit, Optional[MetricTags]) -> Tuple[Optional[MetricsAggregator], Optional[LocalAggregator], Optional[MetricTags]] - client = sentry_sdk.get_client() - if not client.is_active() or client.metrics_aggregator is None: - return None, None, tags - - updated_tags = dict(tags or ()) # type: Dict[str, MetricTagValue] - updated_tags.setdefault("release", client.options["release"]) - updated_tags.setdefault("environment", client.options["environment"]) - - scope = sentry_sdk.get_current_scope() - local_aggregator = None - - # We go with the low-level API here to access transaction information as - # this one is the same between just errors and errors + performance - transaction_source = scope._transaction_info.get("source") - if transaction_source in GOOD_TRANSACTION_SOURCES: - transaction_name = scope._transaction - if transaction_name: - updated_tags.setdefault("transaction", transaction_name) - if scope._span is not None: - local_aggregator = scope._span._get_local_aggregator() - - experiments = client.options.get("_experiments", {}) - before_emit_callback = experiments.get("before_emit_metric") - if before_emit_callback is not None: - with recursion_protection() as in_metrics: - if not in_metrics: - if not before_emit_callback(key, value, unit, updated_tags): - return None, None, updated_tags - - return client.metrics_aggregator, local_aggregator, updated_tags - - -def increment( - key, # type: str - value=1.0, # type: float - unit="none", # type: MeasurementUnit - tags=None, # type: Optional[MetricTags] - timestamp=None, # type: Optional[Union[float, datetime]] - stacklevel=0, # type: int -): - # type: (...) -> None - """Increments a counter.""" - aggregator, local_aggregator, tags = _get_aggregator_and_update_tags( - key, value, unit, tags - ) - if aggregator is not None: - aggregator.add( - "c", key, value, unit, tags, timestamp, local_aggregator, stacklevel - ) - - -# alias as incr is relatively common in python -incr = increment - - -class _Timing: - def __init__( - self, - key, # type: str - tags, # type: Optional[MetricTags] - timestamp, # type: Optional[Union[float, datetime]] - value, # type: Optional[float] - unit, # type: DurationUnit - stacklevel, # type: int - ): - # type: (...) -> None - self.key = key - self.tags = tags - self.timestamp = timestamp - self.value = value - self.unit = unit - self.entered = None # type: Optional[float] - self._span = None # type: Optional[sentry_sdk.tracing.Span] - self.stacklevel = stacklevel - - def _validate_invocation(self, context): - # type: (str) -> None - if self.value is not None: - raise TypeError( - "cannot use timing as %s when a value is provided" % context - ) - - def __enter__(self): - # type: (...) 
-> _Timing - self.entered = TIMING_FUNCTIONS[self.unit]() - self._validate_invocation("context-manager") - self._span = sentry_sdk.start_span(op="metric.timing", name=self.key) - if self.tags: - for key, value in self.tags.items(): - if isinstance(value, (tuple, list)): - value = ",".join(sorted(map(str, value))) - self._span.set_tag(key, value) - self._span.__enter__() - - # report code locations here for better accuracy - aggregator = _get_aggregator() - if aggregator is not None: - aggregator.record_code_location("d", self.key, self.unit, self.stacklevel) - - return self - - def __exit__(self, exc_type, exc_value, tb): - # type: (Any, Any, Any) -> None - assert self._span, "did not enter" - aggregator, local_aggregator, tags = _get_aggregator_and_update_tags( - self.key, - self.value, - self.unit, - self.tags, - ) - if aggregator is not None: - elapsed = TIMING_FUNCTIONS[self.unit]() - self.entered # type: ignore - aggregator.add( - "d", - self.key, - elapsed, - self.unit, - tags, - self.timestamp, - local_aggregator, - None, # code locations are reported in __enter__ - ) - - self._span.__exit__(exc_type, exc_value, tb) - self._span = None - - def __call__(self, f): - # type: (Any) -> Any - self._validate_invocation("decorator") - - @wraps(f) - def timed_func(*args, **kwargs): - # type: (*Any, **Any) -> Any - with timing( - key=self.key, - tags=self.tags, - timestamp=self.timestamp, - unit=self.unit, - stacklevel=self.stacklevel + 1, - ): - return f(*args, **kwargs) - - return timed_func - - -def timing( - key, # type: str - value=None, # type: Optional[float] - unit="second", # type: DurationUnit - tags=None, # type: Optional[MetricTags] - timestamp=None, # type: Optional[Union[float, datetime]] - stacklevel=0, # type: int -): - # type: (...) -> _Timing - """Emits a distribution with the time it takes to run the given code block. - - This method supports three forms of invocation: - - - when a `value` is provided, it functions similar to `distribution` but with - - it can be used as a context manager - - it can be used as a decorator - """ - if value is not None: - aggregator, local_aggregator, tags = _get_aggregator_and_update_tags( - key, value, unit, tags - ) - if aggregator is not None: - aggregator.add( - "d", key, value, unit, tags, timestamp, local_aggregator, stacklevel - ) - return _Timing(key, tags, timestamp, value, unit, stacklevel) - - -def distribution( - key, # type: str - value, # type: float - unit="none", # type: MeasurementUnit - tags=None, # type: Optional[MetricTags] - timestamp=None, # type: Optional[Union[float, datetime]] - stacklevel=0, # type: int -): - # type: (...) -> None - """Emits a distribution.""" - aggregator, local_aggregator, tags = _get_aggregator_and_update_tags( - key, value, unit, tags - ) - if aggregator is not None: - aggregator.add( - "d", key, value, unit, tags, timestamp, local_aggregator, stacklevel - ) - - -def set( - key, # type: str - value, # type: Union[int, str] - unit="none", # type: MeasurementUnit - tags=None, # type: Optional[MetricTags] - timestamp=None, # type: Optional[Union[float, datetime]] - stacklevel=0, # type: int -): - # type: (...) 
-> None - """Emits a set.""" - aggregator, local_aggregator, tags = _get_aggregator_and_update_tags( - key, value, unit, tags - ) - if aggregator is not None: - aggregator.add( - "s", key, value, unit, tags, timestamp, local_aggregator, stacklevel - ) - - -def gauge( - key, # type: str - value, # type: float - unit="none", # type: MeasurementUnit - tags=None, # type: Optional[MetricTags] - timestamp=None, # type: Optional[Union[float, datetime]] - stacklevel=0, # type: int -): - # type: (...) -> None - """Emits a gauge.""" - aggregator, local_aggregator, tags = _get_aggregator_and_update_tags( - key, value, unit, tags - ) - if aggregator is not None: - aggregator.add( - "g", key, value, unit, tags, timestamp, local_aggregator, stacklevel - ) diff --git a/sentry_sdk/opentelemetry/__init__.py b/sentry_sdk/opentelemetry/__init__.py new file mode 100644 index 0000000000..2d057016c1 --- /dev/null +++ b/sentry_sdk/opentelemetry/__init__.py @@ -0,0 +1,9 @@ +from sentry_sdk.opentelemetry.propagator import SentryPropagator +from sentry_sdk.opentelemetry.sampler import SentrySampler +from sentry_sdk.opentelemetry.span_processor import SentrySpanProcessor + +__all__ = [ + "SentryPropagator", + "SentrySampler", + "SentrySpanProcessor", +] diff --git a/sentry_sdk/opentelemetry/consts.py b/sentry_sdk/opentelemetry/consts.py new file mode 100644 index 0000000000..7f7afce9e2 --- /dev/null +++ b/sentry_sdk/opentelemetry/consts.py @@ -0,0 +1,33 @@ +from opentelemetry.context import create_key +from sentry_sdk.tracing_utils import Baggage + + +# propagation keys +SENTRY_TRACE_KEY = create_key("sentry-trace") +SENTRY_BAGGAGE_KEY = create_key("sentry-baggage") + +# scope management keys +SENTRY_SCOPES_KEY = create_key("sentry_scopes") +SENTRY_FORK_ISOLATION_SCOPE_KEY = create_key("sentry_fork_isolation_scope") +SENTRY_USE_CURRENT_SCOPE_KEY = create_key("sentry_use_current_scope") +SENTRY_USE_ISOLATION_SCOPE_KEY = create_key("sentry_use_isolation_scope") + +# trace state keys +TRACESTATE_SAMPLED_KEY = Baggage.SENTRY_PREFIX + "sampled" +TRACESTATE_SAMPLE_RATE_KEY = Baggage.SENTRY_PREFIX + "sample_rate" +TRACESTATE_SAMPLE_RAND_KEY = Baggage.SENTRY_PREFIX + "sample_rand" + +# misc +OTEL_SENTRY_CONTEXT = "otel" +SPAN_ORIGIN = "auto.otel" + + +class SentrySpanAttribute: + DESCRIPTION = "sentry.description" + OP = "sentry.op" + ORIGIN = "sentry.origin" + TAG = "sentry.tag" + NAME = "sentry.name" + SOURCE = "sentry.source" + CONTEXT = "sentry.context" + CUSTOM_SAMPLED = "sentry.custom_sampled" # used for saving start_span(sampled=X) diff --git a/sentry_sdk/opentelemetry/contextvars_context.py b/sentry_sdk/opentelemetry/contextvars_context.py new file mode 100644 index 0000000000..51d450af82 --- /dev/null +++ b/sentry_sdk/opentelemetry/contextvars_context.py @@ -0,0 +1,73 @@ +from typing import cast, TYPE_CHECKING + +from opentelemetry.trace import set_span_in_context +from opentelemetry.context import Context, get_value, set_value +from opentelemetry.context.contextvars_context import ContextVarsRuntimeContext + +import sentry_sdk +from sentry_sdk.opentelemetry.consts import ( + SENTRY_SCOPES_KEY, + SENTRY_FORK_ISOLATION_SCOPE_KEY, + SENTRY_USE_CURRENT_SCOPE_KEY, + SENTRY_USE_ISOLATION_SCOPE_KEY, +) + +if TYPE_CHECKING: + from typing import Optional + from contextvars import Token + import sentry_sdk.opentelemetry.scope as scope + + +class SentryContextVarsRuntimeContext(ContextVarsRuntimeContext): + def attach(self, context): + # type: (Context) -> Token[Context] + scopes = get_value(SENTRY_SCOPES_KEY, context) + + 
should_fork_isolation_scope = context.pop( + SENTRY_FORK_ISOLATION_SCOPE_KEY, False + ) + should_fork_isolation_scope = cast("bool", should_fork_isolation_scope) + + should_use_isolation_scope = context.pop(SENTRY_USE_ISOLATION_SCOPE_KEY, None) + should_use_isolation_scope = cast( + "Optional[scope.PotelScope]", should_use_isolation_scope + ) + + should_use_current_scope = context.pop(SENTRY_USE_CURRENT_SCOPE_KEY, None) + should_use_current_scope = cast( + "Optional[scope.PotelScope]", should_use_current_scope + ) + + if scopes: + scopes = cast("tuple[scope.PotelScope, scope.PotelScope]", scopes) + (current_scope, isolation_scope) = scopes + else: + current_scope = sentry_sdk.get_current_scope() + isolation_scope = sentry_sdk.get_isolation_scope() + + new_context = context + + if should_use_current_scope: + new_scope = should_use_current_scope + + # the main case where we use use_scope is for + # scope propagation in the ThreadingIntegration + # so we need to carry forward the span reference explicitly too + span = should_use_current_scope.span + if span: + new_context = set_span_in_context(span._otel_span, new_context) + + else: + new_scope = current_scope.fork() + + if should_use_isolation_scope: + new_isolation_scope = should_use_isolation_scope + elif should_fork_isolation_scope: + new_isolation_scope = isolation_scope.fork() + else: + new_isolation_scope = isolation_scope + + new_scopes = (new_scope, new_isolation_scope) + + new_context = set_value(SENTRY_SCOPES_KEY, new_scopes, new_context) + return super().attach(new_context) diff --git a/sentry_sdk/integrations/opentelemetry/propagator.py b/sentry_sdk/opentelemetry/propagator.py similarity index 73% rename from sentry_sdk/integrations/opentelemetry/propagator.py rename to sentry_sdk/opentelemetry/propagator.py index b84d582d6e..6062e5643a 100644 --- a/sentry_sdk/integrations/opentelemetry/propagator.py +++ b/sentry_sdk/opentelemetry/propagator.py @@ -1,7 +1,10 @@ +from typing import cast + from opentelemetry import trace from opentelemetry.context import ( Context, get_current, + get_value, set_value, ) from opentelemetry.propagators.textmap import ( @@ -18,23 +21,22 @@ TraceFlags, ) -from sentry_sdk.integrations.opentelemetry.consts import ( - SENTRY_BAGGAGE_KEY, - SENTRY_TRACE_KEY, -) -from sentry_sdk.integrations.opentelemetry.span_processor import ( - SentrySpanProcessor, -) -from sentry_sdk.tracing import ( +from sentry_sdk.consts import ( BAGGAGE_HEADER_NAME, SENTRY_TRACE_HEADER_NAME, ) +from sentry_sdk.opentelemetry.consts import ( + SENTRY_BAGGAGE_KEY, + SENTRY_TRACE_KEY, + SENTRY_SCOPES_KEY, +) from sentry_sdk.tracing_utils import Baggage, extract_sentrytrace_data from typing import TYPE_CHECKING if TYPE_CHECKING: from typing import Optional, Set + import sentry_sdk.opentelemetry.scope as scope class SentryPropagator(TextMapPropagator): @@ -47,6 +49,7 @@ def extract(self, carrier, context=None, getter=default_getter): if context is None: context = get_current() + # TODO-neel-potel cleanup with continue_trace / isolation_scope sentry_trace = getter.get(carrier, SENTRY_TRACE_HEADER_NAME) if not sentry_trace: return context @@ -89,27 +92,15 @@ def inject(self, carrier, context=None, setter=default_setter): if context is None: context = get_current() - current_span = trace.get_current_span(context) - current_span_context = current_span.get_span_context() - - if not current_span_context.is_valid: - return - - span_id = trace.format_span_id(current_span_context.span_id) - - span_map = SentrySpanProcessor().otel_span_map - 
sentry_span = span_map.get(span_id, None) - if not sentry_span: - return - - setter.set(carrier, SENTRY_TRACE_HEADER_NAME, sentry_span.to_traceparent()) + scopes = get_value(SENTRY_SCOPES_KEY, context) + if scopes: + scopes = cast("tuple[scope.PotelScope, scope.PotelScope]", scopes) + (current_scope, _) = scopes - if sentry_span.containing_transaction: - baggage = sentry_span.containing_transaction.get_baggage() - if baggage: - baggage_data = baggage.serialize() - if baggage_data: - setter.set(carrier, BAGGAGE_HEADER_NAME, baggage_data) + # TODO-neel-potel check trace_propagation_targets + # TODO-neel-potel test propagator works with twp + for key, value in current_scope.iter_trace_propagation_headers(): + setter.set(carrier, key, value) @property def fields(self): diff --git a/sentry_sdk/opentelemetry/sampler.py b/sentry_sdk/opentelemetry/sampler.py new file mode 100644 index 0000000000..fb68b644b5 --- /dev/null +++ b/sentry_sdk/opentelemetry/sampler.py @@ -0,0 +1,316 @@ +from decimal import Decimal +from typing import cast + +from opentelemetry import trace +from opentelemetry.sdk.trace.sampling import Sampler, SamplingResult, Decision +from opentelemetry.trace.span import TraceState + +import sentry_sdk +from sentry_sdk.opentelemetry.consts import ( + TRACESTATE_SAMPLED_KEY, + TRACESTATE_SAMPLE_RAND_KEY, + TRACESTATE_SAMPLE_RATE_KEY, + SentrySpanAttribute, +) +from sentry_sdk.tracing_utils import ( + _generate_sample_rand, + has_tracing_enabled, +) +from sentry_sdk.utils import is_valid_sample_rate, logger + +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from typing import Any, Optional, Sequence, Union + from opentelemetry.context import Context + from opentelemetry.trace import Link, SpanKind + from opentelemetry.trace.span import SpanContext + from opentelemetry.util.types import Attributes + + +def get_parent_sampled(parent_context, trace_id): + # type: (Optional[SpanContext], int) -> Optional[bool] + if parent_context is None: + return None + + is_span_context_valid = parent_context is not None and parent_context.is_valid + + # Only inherit sample rate if `traceId` is the same + if is_span_context_valid and parent_context.trace_id == trace_id: + # this is getSamplingDecision in JS + # if there was no sampling flag, defer the decision + dsc_sampled = parent_context.trace_state.get(TRACESTATE_SAMPLED_KEY) + if dsc_sampled == "deferred": + return None + + if parent_context.trace_flags.sampled is not None: + return parent_context.trace_flags.sampled + + if dsc_sampled == "true": + return True + elif dsc_sampled == "false": + return False + + return None + + +def get_parent_sample_rate(parent_context, trace_id): + # type: (Optional[SpanContext], int) -> Optional[float] + if parent_context is None: + return None + + is_span_context_valid = parent_context is not None and parent_context.is_valid + + if is_span_context_valid and parent_context.trace_id == trace_id: + parent_sample_rate = parent_context.trace_state.get(TRACESTATE_SAMPLE_RATE_KEY) + if parent_sample_rate is None: + return None + + try: + return float(parent_sample_rate) + except Exception: + return None + + return None + + +def get_parent_sample_rand(parent_context, trace_id): + # type: (Optional[SpanContext], int) -> Optional[Decimal] + if parent_context is None: + return None + + is_span_context_valid = parent_context is not None and parent_context.is_valid + + if is_span_context_valid and parent_context.trace_id == trace_id: + parent_sample_rand = parent_context.trace_state.get(TRACESTATE_SAMPLE_RAND_KEY) + if 
parent_sample_rand is None: + return None + + return Decimal(parent_sample_rand) + + return None + + +def dropped_result(span_context, attributes, sample_rate=None, sample_rand=None): + # type: (SpanContext, Attributes, Optional[float], Optional[Decimal]) -> SamplingResult + """ + React to a span getting unsampled and return a DROP SamplingResult. + + Update the trace_state with the effective sampled, sample_rate and sample_rand, + record that we dropped the event for client report purposes, and return + an OTel SamplingResult with Decision.DROP. + + See for more info about OTel sampling: + https://opentelemetry-python.readthedocs.io/en/latest/sdk/trace.sampling.html + """ + trace_state = _update_trace_state( + span_context, sampled=False, sample_rate=sample_rate, sample_rand=sample_rand + ) + + is_root_span = not (span_context.is_valid and not span_context.is_remote) + if is_root_span: + # Tell Sentry why we dropped the transaction/root-span + client = sentry_sdk.get_client() + if client.monitor and client.monitor.downsample_factor > 0: + reason = "backpressure" + else: + reason = "sample_rate" + + if client.transport and has_tracing_enabled(client.options): + client.transport.record_lost_event(reason, data_category="transaction") + + # Only one span (the transaction itself) is discarded, since we did not record any spans here. + client.transport.record_lost_event(reason, data_category="span") + + return SamplingResult( + Decision.DROP, + attributes=attributes, + trace_state=trace_state, + ) + + +def sampled_result(span_context, attributes, sample_rate=None, sample_rand=None): + # type: (SpanContext, Attributes, Optional[float], Optional[Decimal]) -> SamplingResult + """ + React to a span being sampled and return a sampled SamplingResult. + + Update the trace_state with the effective sampled, sample_rate and sample_rand, + and return an OTel SamplingResult with Decision.RECORD_AND_SAMPLE. + + See for more info about OTel sampling: + https://opentelemetry-python.readthedocs.io/en/latest/sdk/trace.sampling.html + """ + trace_state = _update_trace_state( + span_context, sampled=True, sample_rate=sample_rate, sample_rand=sample_rand + ) + + return SamplingResult( + Decision.RECORD_AND_SAMPLE, + attributes=attributes, + trace_state=trace_state, + ) + + +def _update_trace_state(span_context, sampled, sample_rate=None, sample_rand=None): + # type: (SpanContext, bool, Optional[float], Optional[Decimal]) -> TraceState + trace_state = span_context.trace_state + + sampled = "true" if sampled else "false" + if TRACESTATE_SAMPLED_KEY not in trace_state: + trace_state = trace_state.add(TRACESTATE_SAMPLED_KEY, sampled) + elif trace_state.get(TRACESTATE_SAMPLED_KEY) == "deferred": + trace_state = trace_state.update(TRACESTATE_SAMPLED_KEY, sampled) + + if sample_rate is not None: + trace_state = trace_state.update(TRACESTATE_SAMPLE_RATE_KEY, str(sample_rate)) + + if sample_rand is not None: + trace_state = trace_state.update( + TRACESTATE_SAMPLE_RAND_KEY, f"{sample_rand:.6f}" # noqa: E231 + ) + + return trace_state + + +class SentrySampler(Sampler): + def should_sample( + self, + parent_context, # type: Optional[Context] + trace_id, # type: int + name, # type: str + kind=None, # type: Optional[SpanKind] + attributes=None, # type: Attributes + links=None, # type: Optional[Sequence[Link]] + trace_state=None, # type: Optional[TraceState] + ): + # type: (...) 
-> SamplingResult + client = sentry_sdk.get_client() + + parent_span_context = trace.get_current_span(parent_context).get_span_context() + + attributes = attributes or {} + + # No tracing enabled, thus no sampling + if not has_tracing_enabled(client.options): + return dropped_result(parent_span_context, attributes) + + # parent_span_context.is_valid means this span has a parent, remote or local + is_root_span = not parent_span_context.is_valid or parent_span_context.is_remote + + sample_rate = None + + parent_sampled = get_parent_sampled(parent_span_context, trace_id) + parent_sample_rate = get_parent_sample_rate(parent_span_context, trace_id) + parent_sample_rand = get_parent_sample_rand(parent_span_context, trace_id) + + if parent_sample_rand is not None: + # We have a sample_rand on the incoming trace or we already backfilled + # it in PropagationContext + sample_rand = parent_sample_rand + else: + # We are the head SDK and we need to generate a new sample_rand + sample_rand = cast(Decimal, _generate_sample_rand(str(trace_id), (0, 1))) + + # Explicit sampled value provided at start_span + custom_sampled = cast( + "Optional[bool]", attributes.get(SentrySpanAttribute.CUSTOM_SAMPLED) + ) + if custom_sampled is not None: + if is_root_span: + sample_rate = float(custom_sampled) + if sample_rate > 0: + return sampled_result( + parent_span_context, + attributes, + sample_rate=sample_rate, + sample_rand=sample_rand, + ) + else: + return dropped_result( + parent_span_context, + attributes, + sample_rate=sample_rate, + sample_rand=sample_rand, + ) + else: + logger.debug( + f"[Tracing] Ignoring sampled param for non-root span {name}" + ) + + # Check if there is a traces_sampler + # The traces_sampler is responsible for checking the parent sampling decision + # itself so that traces stay complete. + has_traces_sampler = callable(client.options.get("traces_sampler")) + + sample_rate_to_propagate = None + + if is_root_span and has_traces_sampler: + sampling_context = create_sampling_context( + name, attributes, parent_span_context, trace_id + ) + sample_rate = client.options["traces_sampler"](sampling_context) + sample_rate_to_propagate = sample_rate + else: + # Check if there is a parent with a sampling decision + if parent_sampled is not None: + sample_rate = bool(parent_sampled) + sample_rate_to_propagate = ( + parent_sample_rate if parent_sample_rate else sample_rate + ) + else: + # Check if there is a traces_sample_rate + sample_rate = client.options.get("traces_sample_rate") + sample_rate_to_propagate = sample_rate + + # If the sample rate is invalid, drop the span + if not is_valid_sample_rate(sample_rate, source=self.__class__.__name__): + logger.warning( + f"[Tracing] Discarding {name} because of invalid sample rate."
+ ) + return dropped_result(parent_span_context, attributes) + + # Down-sample if the backpressure monitor says so + if is_root_span and client.monitor: + sample_rate /= 2**client.monitor.downsample_factor + if client.monitor.downsample_factor > 0: + sample_rate_to_propagate = sample_rate + + # Compare sample_rand to sample_rate to make the final sampling decision + sample_rate = float(cast("Union[bool, float, int]", sample_rate)) + sampled = sample_rand < Decimal.from_float(sample_rate) + + if sampled: + return sampled_result( + parent_span_context, + attributes, + sample_rate=sample_rate_to_propagate, + sample_rand=None if sample_rand == parent_sample_rand else sample_rand, + ) + else: + return dropped_result( + parent_span_context, + attributes, + sample_rate=sample_rate_to_propagate, + sample_rand=None if sample_rand == parent_sample_rand else sample_rand, + ) + + def get_description(self) -> str: + return self.__class__.__name__ + + +def create_sampling_context(name, attributes, parent_span_context, trace_id): + # type: (str, Attributes, Optional[SpanContext], int) -> dict[str, Any] + sampling_context = { + "transaction_context": { + "name": name, + "op": attributes.get(SentrySpanAttribute.OP) if attributes else None, + "source": ( + attributes.get(SentrySpanAttribute.SOURCE) if attributes else None + ), + }, + "parent_sampled": get_parent_sampled(parent_span_context, trace_id), + } # type: dict[str, Any] + + if attributes is not None: + sampling_context.update(attributes) + + return sampling_context
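The decision tail of `should_sample` above boils down to comparing a per-trace `sample_rand` against the effective sample rate, with the rate halved once per backpressure step. A minimal standalone sketch of just that comparison; `downsample_factor` stands in for `client.monitor.downsample_factor` and the values are invented for illustration:

```python
# Sketch of the final sampling comparison in SentrySampler.should_sample:
# an inherited or freshly generated sample_rand is compared against the
# effective sample rate.
from decimal import Decimal


def decide(sample_rand, sample_rate, downsample_factor=0):
    # type: (Decimal, float, int) -> bool
    # Backpressure halves the effective rate once per downsample step.
    sample_rate /= 2**downsample_factor
    return sample_rand < Decimal.from_float(sample_rate)


print(decide(Decimal("0.25"), 0.5))     # True: 0.25 < 0.5
print(decide(Decimal("0.25"), 0.5, 1))  # False: rate halved to 0.25
print(decide(Decimal("0.75"), 0.5))     # False: 0.75 >= 0.5
```

Because `sample_rand` travels in the trace state, every SDK along the trace that applies the same effective rate reaches the same decision, which is what keeps distributed traces complete.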
+ """ + return cls._get_isolation_scope() or _INITIAL_ISOLATION_SCOPE + + @classmethod + def _get_isolation_scope(cls): + # type: () -> Optional[PotelScope] + """ + Returns the isolation scope without creating a new one. Internal use only. + """ + scopes = cls._get_scopes() + return scopes[1] if scopes else None + + @contextmanager + def continue_trace(self, environ_or_headers): + # type: (Dict[str, Any]) -> Generator[None, None, None] + """ + Sets the propagation context from environment or headers to continue an incoming trace. + Any span started within this context manager will use the same trace_id, parent_span_id + and inherit the sampling decision from the incoming trace. + """ + self.generate_propagation_context(environ_or_headers) + + span_context = self._incoming_otel_span_context() + if span_context is None: + yield + else: + with use_span(NonRecordingSpan(span_context)): + yield + + def _incoming_otel_span_context(self): + # type: () -> Optional[SpanContext] + if self._propagation_context is None: + return None + # If sentry-trace extraction didn't have a parent_span_id, we don't have an upstream header + if self._propagation_context.parent_span_id is None: + return None + + trace_flags = TraceFlags( + TraceFlags.SAMPLED + if self._propagation_context.parent_sampled + else TraceFlags.DEFAULT + ) + + if self._propagation_context.baggage: + trace_state = trace_state_from_baggage(self._propagation_context.baggage) + else: + trace_state = TraceState() + + # for twp to work, we also need to consider deferred sampling when the sampling + # flag is not present, so the above TraceFlags are not sufficient + if self._propagation_context.parent_sampled is None: + trace_state = trace_state.update(TRACESTATE_SAMPLED_KEY, "deferred") + + span_context = SpanContext( + trace_id=int(self._propagation_context.trace_id, 16), + span_id=int(self._propagation_context.parent_span_id, 16), + is_remote=True, + trace_flags=trace_flags, + trace_state=trace_state, + ) + + return span_context + + def start_transaction(self, **kwargs): + # type: (Any) -> Span + """ + .. deprecated:: 3.0.0 + This function is deprecated and will be removed in a future release. + Use :py:meth:`sentry_sdk.start_span` instead. 
+ """ + warnings.warn( + "The `start_transaction` method is deprecated, please use `sentry_sdk.start_span instead.`", + DeprecationWarning, + stacklevel=2, + ) + return self.start_span(**kwargs) + + def start_span(self, **kwargs): + # type: (Any) -> Span + return Span(**kwargs) + + +_INITIAL_CURRENT_SCOPE = PotelScope(ty=ScopeType.CURRENT) +_INITIAL_ISOLATION_SCOPE = PotelScope(ty=ScopeType.ISOLATION) + + +def setup_initial_scopes(): + # type: () -> None + global _INITIAL_CURRENT_SCOPE, _INITIAL_ISOLATION_SCOPE + _INITIAL_CURRENT_SCOPE = PotelScope(ty=ScopeType.CURRENT) + _INITIAL_ISOLATION_SCOPE = PotelScope(ty=ScopeType.ISOLATION) + + scopes = (_INITIAL_CURRENT_SCOPE, _INITIAL_ISOLATION_SCOPE) + attach(set_value(SENTRY_SCOPES_KEY, scopes)) + + +def setup_scope_context_management(): + # type: () -> None + import opentelemetry.context + + opentelemetry.context._RUNTIME_CONTEXT = SentryContextVarsRuntimeContext() + setup_initial_scopes() + + +@contextmanager +def isolation_scope(): + # type: () -> Generator[PotelScope, None, None] + context = set_value(SENTRY_FORK_ISOLATION_SCOPE_KEY, True) + token = attach(context) + try: + yield PotelScope.get_isolation_scope() + finally: + detach(token) + + +@contextmanager +def new_scope(): + # type: () -> Generator[PotelScope, None, None] + token = attach(get_current()) + try: + yield PotelScope.get_current_scope() + finally: + detach(token) + + +@contextmanager +def use_scope(scope): + # type: (PotelScope) -> Generator[PotelScope, None, None] + context = set_value(SENTRY_USE_CURRENT_SCOPE_KEY, scope) + token = attach(context) + + try: + yield scope + finally: + detach(token) + + +@contextmanager +def use_isolation_scope(isolation_scope): + # type: (PotelScope) -> Generator[PotelScope, None, None] + context = set_value(SENTRY_USE_ISOLATION_SCOPE_KEY, isolation_scope) + token = attach(context) + + try: + yield isolation_scope + finally: + detach(token) diff --git a/sentry_sdk/opentelemetry/span_processor.py b/sentry_sdk/opentelemetry/span_processor.py new file mode 100644 index 0000000000..b5279bccb0 --- /dev/null +++ b/sentry_sdk/opentelemetry/span_processor.py @@ -0,0 +1,329 @@ +from collections import deque, defaultdict +from typing import cast + +from opentelemetry.trace import ( + format_trace_id, + format_span_id, + get_current_span, + INVALID_SPAN, + Span as AbstractSpan, +) +from opentelemetry.context import Context +from opentelemetry.sdk.trace import Span, ReadableSpan, SpanProcessor + +import sentry_sdk +from sentry_sdk.consts import SPANDATA, DEFAULT_SPAN_ORIGIN +from sentry_sdk.utils import get_current_thread_meta +from sentry_sdk.opentelemetry.consts import ( + OTEL_SENTRY_CONTEXT, + SentrySpanAttribute, +) +from sentry_sdk.opentelemetry.sampler import create_sampling_context +from sentry_sdk.opentelemetry.utils import ( + is_sentry_span, + convert_from_otel_timestamp, + extract_span_attributes, + extract_span_data, + extract_transaction_name_source, + get_trace_context, + get_profile_context, + get_sentry_meta, + set_sentry_meta, +) +from sentry_sdk.profiler.continuous_profiler import ( + try_autostart_continuous_profiler, + get_profiler_id, + try_profile_lifecycle_trace_start, +) +from sentry_sdk.profiler.transaction_profiler import Profile +from sentry_sdk._types import TYPE_CHECKING + +if TYPE_CHECKING: + from typing import Optional, List, Any, Deque, DefaultDict + from sentry_sdk._types import Event + + +DEFAULT_MAX_SPANS = 1000 + + +class SentrySpanProcessor(SpanProcessor): + """ + Converts OTel spans into Sentry spans so they can be 
diff --git a/sentry_sdk/opentelemetry/span_processor.py b/sentry_sdk/opentelemetry/span_processor.py new file mode 100644 index 0000000000..b5279bccb0 --- /dev/null +++ b/sentry_sdk/opentelemetry/span_processor.py @@ -0,0 +1,329 @@ +from collections import deque, defaultdict +from typing import cast + +from opentelemetry.trace import ( + format_trace_id, + format_span_id, + get_current_span, + INVALID_SPAN, + Span as AbstractSpan, +) +from opentelemetry.context import Context +from opentelemetry.sdk.trace import Span, ReadableSpan, SpanProcessor + +import sentry_sdk +from sentry_sdk.consts import SPANDATA, DEFAULT_SPAN_ORIGIN +from sentry_sdk.utils import get_current_thread_meta +from sentry_sdk.opentelemetry.consts import ( + OTEL_SENTRY_CONTEXT, + SentrySpanAttribute, +) +from sentry_sdk.opentelemetry.sampler import create_sampling_context +from sentry_sdk.opentelemetry.utils import ( + is_sentry_span, + convert_from_otel_timestamp, + extract_span_attributes, + extract_span_data, + extract_transaction_name_source, + get_trace_context, + get_profile_context, + get_sentry_meta, + set_sentry_meta, +) +from sentry_sdk.profiler.continuous_profiler import ( + try_autostart_continuous_profiler, + get_profiler_id, + try_profile_lifecycle_trace_start, +) +from sentry_sdk.profiler.transaction_profiler import Profile +from sentry_sdk._types import TYPE_CHECKING + +if TYPE_CHECKING: + from typing import Optional, List, Any, Deque, DefaultDict + from sentry_sdk._types import Event + + +DEFAULT_MAX_SPANS = 1000 + + +class SentrySpanProcessor(SpanProcessor): + """ + Converts OTel spans into Sentry spans so they can be sent to the Sentry backend. + """ + + def __new__(cls): + # type: () -> SentrySpanProcessor + if not hasattr(cls, "instance"): + cls.instance = super().__new__(cls) + + return cls.instance + + def __init__(self): + # type: () -> None + self._children_spans = defaultdict( + list + ) # type: DefaultDict[int, List[ReadableSpan]] + self._dropped_spans = defaultdict(lambda: 0) # type: DefaultDict[int, int] + + def on_start(self, span, parent_context=None): + # type: (Span, Optional[Context]) -> None + if is_sentry_span(span): + return + + self._add_root_span(span, get_current_span(parent_context)) + self._start_profile(span) + + def on_end(self, span): + # type: (ReadableSpan) -> None + if is_sentry_span(span): + return + + is_root_span = not span.parent or span.parent.is_remote + if is_root_span: + # if we have a root span ending, stop the profiler, build a transaction and send it + self._stop_profile(span) + self._flush_root_span(span) + else: + self._append_child_span(span) + + # TODO-neel-potel not sure we need a clear like JS + def shutdown(self): + # type: () -> None + pass + + # TODO-neel-potel change default? this is 30 sec + # TODO-neel-potel call this in client.flush + def force_flush(self, timeout_millis=30000): + # type: (int) -> bool + return True + + def _add_root_span(self, span, parent_span): + # type: (Span, AbstractSpan) -> None + """ + This is required to make Span.root_span work + since we can't traverse back to the root purely with otel efficiently. + """ + if parent_span != INVALID_SPAN and not parent_span.get_span_context().is_remote: + # child span points to parent's root or parent + parent_root_span = get_sentry_meta(parent_span, "root_span") + set_sentry_meta(span, "root_span", parent_root_span or parent_span) + else: + # root span points to itself + set_sentry_meta(span, "root_span", span) + + def _start_profile(self, span): + # type: (Span) -> None + try_autostart_continuous_profiler() + + profiler_id = get_profiler_id() + thread_id, thread_name = get_current_thread_meta() + + if profiler_id: + span.set_attribute(SPANDATA.PROFILER_ID, profiler_id) + if thread_id: + span.set_attribute(SPANDATA.THREAD_ID, str(thread_id)) + if thread_name: + span.set_attribute(SPANDATA.THREAD_NAME, thread_name) + + is_root_span = not span.parent or span.parent.is_remote + sampled = span.context and span.context.trace_flags.sampled + + if is_root_span and sampled: + # profiler uses time.perf_counter_ns() so we cannot use the + # unix timestamp that is on span.start_time + # setting it to 0 means the profiler will internally measure time on start + profile = Profile(sampled, 0) + + sampling_context = create_sampling_context( + span.name, span.attributes, span.parent, span.context.trace_id + ) + profile._set_initial_sampling_decision(sampling_context) + profile.__enter__() + set_sentry_meta(span, "profile", profile) + + continuous_profile = try_profile_lifecycle_trace_start() + profiler_id = get_profiler_id() + if profiler_id: + span.set_attribute(SPANDATA.PROFILER_ID, profiler_id) + set_sentry_meta(span, "continuous_profile", continuous_profile) + + def _stop_profile(self, span): + # type: (ReadableSpan) -> None + continuous_profiler = get_sentry_meta(span, "continuous_profile") + if continuous_profiler: + continuous_profiler.stop() + + def _flush_root_span(self, span): + # type: (ReadableSpan) -> None + transaction_event = self._root_span_to_transaction_event(span) + if not transaction_event: + return + + collected_spans, dropped_spans = self._collect_children(span) + spans = [] + for child
in collected_spans: + span_json = self._span_to_json(child) + if span_json: + spans.append(span_json) + + transaction_event["spans"] = spans + if dropped_spans > 0: + transaction_event["_dropped_spans"] = dropped_spans + + # TODO-neel-potel sort and cutoff max spans + + sentry_sdk.capture_event(transaction_event) + + def _append_child_span(self, span): + # type: (ReadableSpan) -> None + if not span.parent: + return + + max_spans = ( + sentry_sdk.get_client().options["_experiments"].get("max_spans") + or DEFAULT_MAX_SPANS + ) + + children_spans = self._children_spans[span.parent.span_id] + if len(children_spans) < max_spans: + children_spans.append(span) + else: + self._dropped_spans[span.parent.span_id] += 1 + + def _collect_children(self, span): + # type: (ReadableSpan) -> tuple[List[ReadableSpan], int] + if not span.context: + return [], 0 + + children = [] + dropped_spans = 0 + bfs_queue = deque() # type: Deque[int] + bfs_queue.append(span.context.span_id) + + while bfs_queue: + parent_span_id = bfs_queue.popleft() + node_children = self._children_spans.pop(parent_span_id, []) + dropped_spans += self._dropped_spans.pop(parent_span_id, 0) + children.extend(node_children) + bfs_queue.extend( + [child.context.span_id for child in node_children if child.context] + ) + + return children, dropped_spans + + # we construct the event from scratch here + # and not use the current Transaction class for easier refactoring + def _root_span_to_transaction_event(self, span): + # type: (ReadableSpan) -> Optional[Event] + if not span.context: + return None + + event = self._common_span_transaction_attributes_as_json(span) + if event is None: + return None + + transaction_name, transaction_source = extract_transaction_name_source(span) + span_data = extract_span_data(span) + trace_context = get_trace_context(span, span_data=span_data) + contexts = {"trace": trace_context} + + profile_context = get_profile_context(span) + if profile_context: + contexts["profile"] = profile_context + + (_, description, _, http_status, _) = span_data + + if http_status: + contexts["response"] = {"status_code": http_status} + + if span.resource.attributes: + contexts[OTEL_SENTRY_CONTEXT] = {"resource": dict(span.resource.attributes)} + + event.update( + { + "type": "transaction", + "transaction": transaction_name or description, + "transaction_info": {"source": transaction_source or "custom"}, + "contexts": contexts, + } + ) + + profile = cast("Optional[Profile]", get_sentry_meta(span, "profile")) + if profile: + profile.__exit__(None, None, None) + if profile.valid(): + event["profile"] = profile + set_sentry_meta(span, "profile", None) + + return event + + def _span_to_json(self, span): + # type: (ReadableSpan) -> Optional[dict[str, Any]] + if not span.context: + return None + + # This is a safe cast because dict[str, Any] is a superset of Event + span_json = cast( + "dict[str, Any]", self._common_span_transaction_attributes_as_json(span) + ) + if span_json is None: + return None + + trace_id = format_trace_id(span.context.trace_id) + span_id = format_span_id(span.context.span_id) + parent_span_id = format_span_id(span.parent.span_id) if span.parent else None + + (op, description, status, _, origin) = extract_span_data(span) + + span_json.update( + { + "trace_id": trace_id, + "span_id": span_id, + "op": op, + "description": description, + "status": status, + "origin": origin or DEFAULT_SPAN_ORIGIN, + } + ) + + if parent_span_id: + span_json["parent_span_id"] = parent_span_id + + attributes = getattr(span, "attributes", {}) 
or {} + if attributes: + span_json["data"] = {} + for key, value in attributes.items(): + if not key.startswith("_"): + span_json["data"][key] = value + + return span_json + + def _common_span_transaction_attributes_as_json(self, span): + # type: (ReadableSpan) -> Optional[Event] + if not span.start_time or not span.end_time: + return None + + common_json = { + "start_timestamp": convert_from_otel_timestamp(span.start_time), + "timestamp": convert_from_otel_timestamp(span.end_time), + } # type: Event + + tags = extract_span_attributes(span, SentrySpanAttribute.TAG) + if tags: + common_json["tags"] = tags + + return common_json + + def _log_debug_info(self): + # type: () -> None + import pprint + + pprint.pprint( + { + format_span_id(span_id): [ + (format_span_id(child.context.span_id), child.name) + for child in children + ] + for span_id, children in self._children_spans.items() + } + ) diff --git a/sentry_sdk/opentelemetry/tracing.py b/sentry_sdk/opentelemetry/tracing.py new file mode 100644 index 0000000000..8392c1515a --- /dev/null +++ b/sentry_sdk/opentelemetry/tracing.py @@ -0,0 +1,35 @@ +from opentelemetry import trace +from opentelemetry.propagate import set_global_textmap +from opentelemetry.sdk.trace import TracerProvider, Span, ReadableSpan + +from sentry_sdk.opentelemetry import ( + SentryPropagator, + SentrySampler, + SentrySpanProcessor, +) + + +def patch_readable_span(): + # type: () -> None + """ + We need to pass through Sentry-specific metadata/objects from Span to ReadableSpan + to work with them consistently in the SpanProcessor. + """ + old_readable_span = Span._readable_span + + def sentry_patched_readable_span(self): + # type: (Span) -> ReadableSpan + readable_span = old_readable_span(self) + readable_span._sentry_meta = getattr(self, "_sentry_meta", {}) # type: ignore[attr-defined] + return readable_span + + Span._readable_span = sentry_patched_readable_span # type: ignore[method-assign] + + +def setup_sentry_tracing(): + # type: () -> None + provider = TracerProvider(sampler=SentrySampler()) + provider.add_span_processor(SentrySpanProcessor()) + trace.set_tracer_provider(provider) + + set_global_textmap(SentryPropagator())
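As a usage sketch of the new module above (illustrative only: in the finished SDK this wiring is expected to happen inside sentry_sdk.init(), not in user code, and the DSN below is a placeholder):

import sentry_sdk
from opentelemetry import trace

from sentry_sdk.opentelemetry.tracing import patch_readable_span, setup_sentry_tracing

sentry_sdk.init(dsn="https://examplekey@o0.ingest.sentry.io/0")  # placeholder DSN
patch_readable_span()    # ReadableSpan snapshots now carry _sentry_meta
setup_sentry_tracing()   # installs SentrySampler, SentrySpanProcessor and SentryPropagator

tracer = trace.get_tracer(__name__)
with tracer.start_as_current_span("demo-root") as span:
    span.set_attribute("demo.key", "value")
# When the span ends, SentrySpanProcessor receives the ReadableSpan and, for a
# root span, builds the transaction event as shown in the processor code above.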
diff --git a/sentry_sdk/opentelemetry/utils.py b/sentry_sdk/opentelemetry/utils.py new file mode 100644 index 0000000000..b9dbbd5f09 --- /dev/null +++ b/sentry_sdk/opentelemetry/utils.py @@ -0,0 +1,476 @@ +import re +from typing import cast +from datetime import datetime, timezone + +from urllib3.util import parse_url as urlparse +from urllib.parse import quote, unquote +from opentelemetry.trace import ( + Span as AbstractSpan, + SpanKind, + StatusCode, + format_trace_id, + format_span_id, + TraceState, +) +from opentelemetry.semconv.trace import SpanAttributes +from opentelemetry.sdk.trace import ReadableSpan + +import sentry_sdk +from sentry_sdk.utils import Dsn +from sentry_sdk.consts import ( + SPANSTATUS, + OP, + SPANDATA, + DEFAULT_SPAN_ORIGIN, + LOW_QUALITY_TRANSACTION_SOURCES, +) +from sentry_sdk.opentelemetry.consts import SentrySpanAttribute +from sentry_sdk.tracing_utils import Baggage, get_span_status_from_http_code + +from sentry_sdk._types import TYPE_CHECKING + +if TYPE_CHECKING: + from typing import Any, Optional, Mapping, Sequence, Union + from sentry_sdk._types import OtelExtractedSpanData + + +GRPC_ERROR_MAP = { + "1": SPANSTATUS.CANCELLED, + "2": SPANSTATUS.UNKNOWN_ERROR, + "3": SPANSTATUS.INVALID_ARGUMENT, + "4": SPANSTATUS.DEADLINE_EXCEEDED, + "5": SPANSTATUS.NOT_FOUND, + "6": SPANSTATUS.ALREADY_EXISTS, + "7": SPANSTATUS.PERMISSION_DENIED, + "8": SPANSTATUS.RESOURCE_EXHAUSTED, + "9": SPANSTATUS.FAILED_PRECONDITION, + "10": SPANSTATUS.ABORTED, + "11": SPANSTATUS.OUT_OF_RANGE, + "12": SPANSTATUS.UNIMPLEMENTED, + "13": SPANSTATUS.INTERNAL_ERROR, + "14": SPANSTATUS.UNAVAILABLE, + "15": SPANSTATUS.DATA_LOSS, + "16": SPANSTATUS.UNAUTHENTICATED, +} + + +def is_sentry_span(span): + # type: (ReadableSpan) -> bool + """ + Break the infinite loop: + HTTP requests to Sentry are caught by OTel and sent to Sentry again. + """ + from sentry_sdk import get_client + + if not span.attributes: + return False + + span_url = span.attributes.get(SpanAttributes.HTTP_URL, None) + span_url = cast("Optional[str]", span_url) + + if not span_url: + return False + + dsn_url = None + client = get_client() + + if client.dsn: + try: + dsn_url = Dsn(client.dsn).netloc + except Exception: + pass + + if not dsn_url: + return False + + if dsn_url in span_url: + return True + + return False + + +def convert_from_otel_timestamp(time): + # type: (int) -> datetime + """Convert an OTel nanosecond-level timestamp to a datetime.""" + return datetime.fromtimestamp(time / 1e9, timezone.utc) + + +def convert_to_otel_timestamp(time): + # type: (Union[datetime, float]) -> int + """Convert a datetime to an OTel timestamp (with nanosecond precision).""" + if isinstance(time, datetime): + return int(time.timestamp() * 1e9) + return int(time * 1e9) + + +def extract_transaction_name_source(span): + # type: (ReadableSpan) -> tuple[Optional[str], Optional[str]] + if not span.attributes: + return (None, None) + return ( + cast("Optional[str]", span.attributes.get(SentrySpanAttribute.NAME)), + cast("Optional[str]", span.attributes.get(SentrySpanAttribute.SOURCE)), + ) + + +def extract_span_data(span): + # type: (ReadableSpan) -> OtelExtractedSpanData + op = span.name + description = span.name + status, http_status = extract_span_status(span) + origin = None + if span.attributes is None: + return (op, description, status, http_status, origin) + + attribute_op = cast("Optional[str]", span.attributes.get(SentrySpanAttribute.OP)) + op = attribute_op or op + description = cast( + "str", span.attributes.get(SentrySpanAttribute.DESCRIPTION) or description + ) + origin = cast("Optional[str]", span.attributes.get(SentrySpanAttribute.ORIGIN)) + + http_method = span.attributes.get(SpanAttributes.HTTP_METHOD) + http_method = cast("Optional[str]", http_method) + if http_method: + return span_data_for_http_method(span) + + db_query = span.attributes.get(SpanAttributes.DB_SYSTEM) + if db_query: + return span_data_for_db_query(span) + + rpc_service = span.attributes.get(SpanAttributes.RPC_SERVICE) + if rpc_service: + return ( + attribute_op or "rpc", + description, + status, + http_status, + origin, + ) + + messaging_system = span.attributes.get(SpanAttributes.MESSAGING_SYSTEM) + if messaging_system: + return ( + attribute_op or "message", + description, + status, + http_status, + origin, + ) + + faas_trigger = span.attributes.get(SpanAttributes.FAAS_TRIGGER) + if faas_trigger: + return (str(faas_trigger), description, status, http_status, origin) + + return (op, description, status, http_status, origin)
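A quick illustration of the two timestamp helpers above: OTel carries integer nanoseconds since the Unix epoch, while Sentry events use tz-aware datetimes, and the round trip is lossless (the concrete value here is arbitrary):

from datetime import datetime, timezone

from sentry_sdk.opentelemetry.utils import (
    convert_from_otel_timestamp,
    convert_to_otel_timestamp,
)

otel_ts = 1_700_000_000_000_000_000  # nanoseconds since the Unix epoch
dt = convert_from_otel_timestamp(otel_ts)
assert dt == datetime(2023, 11, 14, 22, 13, 20, tzinfo=timezone.utc)
assert convert_to_otel_timestamp(dt) == otel_ts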
+def span_data_for_http_method(span): + # type: (ReadableSpan) -> OtelExtractedSpanData + span_attributes = span.attributes or {} + + op = cast("Optional[str]", span_attributes.get(SentrySpanAttribute.OP)) + if op is None: + op = "http" + + if span.kind == SpanKind.SERVER: + op += ".server" + elif span.kind == SpanKind.CLIENT: + op += ".client" + + http_method = span_attributes.get(SpanAttributes.HTTP_METHOD) + route = span_attributes.get(SpanAttributes.HTTP_ROUTE) + target = span_attributes.get(SpanAttributes.HTTP_TARGET) + peer_name = span_attributes.get(SpanAttributes.NET_PEER_NAME) + + # TODO-neel-potel remove description completely + description = span_attributes.get( + SentrySpanAttribute.DESCRIPTION + ) or span_attributes.get(SentrySpanAttribute.NAME) + description = cast("Optional[str]", description) + if description is None: + description = f"{http_method}" + + if route: + description = f"{http_method} {route}" + elif target: + description = f"{http_method} {target}" + elif peer_name: + description = f"{http_method} {peer_name}" + else: + url = span_attributes.get(SpanAttributes.HTTP_URL) + url = cast("Optional[str]", url) + + if url: + parsed_url = urlparse(url) + url = "{}://{}{}".format( + parsed_url.scheme, parsed_url.netloc, parsed_url.path + ) + description = f"{http_method} {url}" + + status, http_status = extract_span_status(span) + + origin = cast("Optional[str]", span_attributes.get(SentrySpanAttribute.ORIGIN)) + + return (op, description, status, http_status, origin) + + +def span_data_for_db_query(span): + # type: (ReadableSpan) -> OtelExtractedSpanData + span_attributes = span.attributes or {} + + op = cast("str", span_attributes.get(SentrySpanAttribute.OP, OP.DB)) + + statement = span_attributes.get(SpanAttributes.DB_STATEMENT, None) + statement = cast("Optional[str]", statement) + + description = statement or span.name + origin = cast("Optional[str]", span_attributes.get(SentrySpanAttribute.ORIGIN)) + + return (op, description, None, None, origin) + + +def extract_span_status(span): + # type: (ReadableSpan) -> tuple[Optional[str], Optional[int]] + span_attributes = span.attributes or {} + status = span.status or None + + if status: + inferred_status, http_status = infer_status_from_attributes(span_attributes) + + if status.status_code == StatusCode.OK: + return (SPANSTATUS.OK, http_status) + elif status.status_code == StatusCode.ERROR: + if status.description is None: + if inferred_status: + return (inferred_status, http_status) + + if http_status is not None: + return (inferred_status, http_status) + + if ( + status.description is not None + and status.description in GRPC_ERROR_MAP.values() + ): + return (status.description, None) + else: + return (SPANSTATUS.UNKNOWN_ERROR, None) + + inferred_status, http_status = infer_status_from_attributes(span_attributes) + if inferred_status: + return (inferred_status, http_status) + + if status and status.status_code == StatusCode.UNSET: + return (None, None) + else: + return (SPANSTATUS.UNKNOWN_ERROR, None) + + +def infer_status_from_attributes(span_attributes): + # type: (Mapping[str, str | bool | int | float | Sequence[str] | Sequence[bool] | Sequence[int] | Sequence[float]]) -> tuple[Optional[str], Optional[int]] + http_status = get_http_status_code(span_attributes) + + if http_status: + return (get_span_status_from_http_code(http_status), http_status) + + grpc_status = span_attributes.get(SpanAttributes.RPC_GRPC_STATUS_CODE) + if grpc_status: + return (GRPC_ERROR_MAP.get(str(grpc_status), SPANSTATUS.UNKNOWN_ERROR), None) + + return (None, None) + + +def get_http_status_code(span_attributes): + # type: (Mapping[str, str | bool | int | float | Sequence[str] | Sequence[bool] | Sequence[int] | Sequence[float]]) -> Optional[int] + try: + http_status = span_attributes.get(SpanAttributes.HTTP_RESPONSE_STATUS_CODE) + except AttributeError: + # HTTP_RESPONSE_STATUS_CODE was added in
1.21, so if we're on an older + # OTel version SpanAttributes.HTTP_RESPONSE_STATUS_CODE will throw an + # AttributeError + http_status = None + + if http_status is None: + # Fall back to the deprecated attribute + http_status = span_attributes.get(SpanAttributes.HTTP_STATUS_CODE) + + http_status = cast("Optional[int]", http_status) + + return http_status + + +def extract_span_attributes(span, namespace): + # type: (ReadableSpan, str) -> dict[str, Any] + """ + Extract Sentry-specific span attributes and make them look the way Sentry expects. + """ + extracted_attrs = {} # type: dict[str, Any] + + for attr, value in (span.attributes or {}).items(): + if attr.startswith(namespace): + key = attr[len(namespace) + 1 :] + extracted_attrs[key] = value + + return extracted_attrs + + +def get_trace_context(span, span_data=None): + # type: (ReadableSpan, Optional[OtelExtractedSpanData]) -> dict[str, Any] + if not span.context: + return {} + + trace_id = format_trace_id(span.context.trace_id) + span_id = format_span_id(span.context.span_id) + parent_span_id = format_span_id(span.parent.span_id) if span.parent else None + + if span_data is None: + span_data = extract_span_data(span) + + (op, _, status, _, origin) = span_data + + trace_context = { + "trace_id": trace_id, + "span_id": span_id, + "parent_span_id": parent_span_id, + "op": op, + "origin": origin or DEFAULT_SPAN_ORIGIN, + } # type: dict[str, Any] + + if status: + trace_context["status"] = status + + if span.attributes: + trace_context["data"] = dict(span.attributes) + + trace_state = get_trace_state(span) + trace_context["dynamic_sampling_context"] = dsc_from_trace_state(trace_state) + + # TODO-neel-potel profiler thread_id, thread_name + + return trace_context + + +def trace_state_from_baggage(baggage): + # type: (Baggage) -> TraceState + items = [] + for k, v in baggage.sentry_items.items(): + key = Baggage.SENTRY_PREFIX + quote(k) + val = quote(str(v)) + items.append((key, val)) + return TraceState(items) + + +def baggage_from_trace_state(trace_state): + # type: (TraceState) -> Baggage + return Baggage(dsc_from_trace_state(trace_state)) + + +def serialize_trace_state(trace_state): + # type: (TraceState) -> str + sentry_items = [] + for k, v in trace_state.items(): + if Baggage.SENTRY_PREFIX_REGEX.match(k): + sentry_items.append((k, v)) + return ",".join(key + "=" + value for key, value in sentry_items) + + +def dsc_from_trace_state(trace_state): + # type: (TraceState) -> dict[str, str] + dsc = {} + for k, v in trace_state.items(): + if Baggage.SENTRY_PREFIX_REGEX.match(k): + key = re.sub(Baggage.SENTRY_PREFIX_REGEX, "", k) + dsc[unquote(key)] = unquote(v) + return dsc + + +def has_incoming_trace(trace_state): + # type: (TraceState) -> bool + """ + The existence of a sentry-trace_id in the baggage implies we continued an upstream trace. + """ + return (Baggage.SENTRY_PREFIX + "trace_id") in trace_state + + +def get_trace_state(span): + # type: (Union[AbstractSpan, ReadableSpan]) -> TraceState + """ + Get the existing trace_state with sentry items + or populate it if we are the head SDK. 
+ """ + span_context = span.get_span_context() + if not span_context: + return TraceState() + + trace_state = span_context.trace_state + + if has_incoming_trace(trace_state): + return trace_state + else: + client = sentry_sdk.get_client() + if not client.is_active(): + return trace_state + + options = client.options or {} + + trace_state = trace_state.update( + Baggage.SENTRY_PREFIX + "trace_id", + quote(format_trace_id(span_context.trace_id)), + ) + + if options.get("environment"): + trace_state = trace_state.update( + Baggage.SENTRY_PREFIX + "environment", quote(options["environment"]) + ) + + if options.get("release"): + trace_state = trace_state.update( + Baggage.SENTRY_PREFIX + "release", quote(options["release"]) + ) + + if options.get("dsn"): + trace_state = trace_state.update( + Baggage.SENTRY_PREFIX + "public_key", + quote(Dsn(options["dsn"]).public_key), + ) + + root_span = get_sentry_meta(span, "root_span") + if root_span and isinstance(root_span, ReadableSpan): + transaction_name, transaction_source = extract_transaction_name_source( + root_span + ) + + if ( + transaction_name + and transaction_source not in LOW_QUALITY_TRANSACTION_SOURCES + ): + trace_state = trace_state.update( + Baggage.SENTRY_PREFIX + "transaction", quote(transaction_name) + ) + + return trace_state + + +def get_sentry_meta(span, key): + # type: (Union[AbstractSpan, ReadableSpan], str) -> Any + sentry_meta = getattr(span, "_sentry_meta", None) + return sentry_meta.get(key) if sentry_meta else None + + +def set_sentry_meta(span, key, value): + # type: (Union[AbstractSpan, ReadableSpan], str, Any) -> None + sentry_meta = getattr(span, "_sentry_meta", {}) + sentry_meta[key] = value + span._sentry_meta = sentry_meta # type: ignore[union-attr] + + +def get_profile_context(span): + # type: (ReadableSpan) -> Optional[dict[str, str]] + if not span.attributes: + return None + + profiler_id = cast("Optional[str]", span.attributes.get(SPANDATA.PROFILER_ID)) + if profiler_id is None: + return None + + return {"profiler_id": profiler_id} diff --git a/sentry_sdk/profiler/__init__.py b/sentry_sdk/profiler/__init__.py index 0bc63e3a6d..762bd4d9cf 100644 --- a/sentry_sdk/profiler/__init__.py +++ b/sentry_sdk/profiler/__init__.py @@ -1,49 +1,9 @@ from sentry_sdk.profiler.continuous_profiler import ( - start_profile_session, start_profiler, - stop_profile_session, stop_profiler, ) -from sentry_sdk.profiler.transaction_profiler import ( - MAX_PROFILE_DURATION_NS, - PROFILE_MINIMUM_SAMPLES, - Profile, - Scheduler, - ThreadScheduler, - GeventScheduler, - has_profiling_enabled, - setup_profiler, - teardown_profiler, -) -from sentry_sdk.profiler.utils import ( - DEFAULT_SAMPLING_FREQUENCY, - MAX_STACK_DEPTH, - get_frame_name, - extract_frame, - extract_stack, - frame_id, -) __all__ = [ - "start_profile_session", # TODO: Deprecate this in favor of `start_profiler` "start_profiler", - "stop_profile_session", # TODO: Deprecate this in favor of `stop_profiler` "stop_profiler", - # DEPRECATED: The following was re-exported for backwards compatibility. It - # will be removed from sentry_sdk.profiler in a future release. 
- "MAX_PROFILE_DURATION_NS", - "PROFILE_MINIMUM_SAMPLES", - "Profile", - "Scheduler", - "ThreadScheduler", - "GeventScheduler", - "has_profiling_enabled", - "setup_profiler", - "teardown_profiler", - "DEFAULT_SAMPLING_FREQUENCY", - "MAX_STACK_DEPTH", - "get_frame_name", - "extract_frame", - "extract_stack", - "frame_id", ] diff --git a/sentry_sdk/profiler/continuous_profiler.py b/sentry_sdk/profiler/continuous_profiler.py index 77ba60dbda..371f61c632 100644 --- a/sentry_sdk/profiler/continuous_profiler.py +++ b/sentry_sdk/profiler/continuous_profiler.py @@ -5,7 +5,6 @@ import threading import time import uuid -import warnings from collections import deque from datetime import datetime, timezone @@ -88,15 +87,9 @@ def setup_continuous_profiler(options, sdk_info, capture_func): else: default_profiler_mode = ThreadContinuousScheduler.mode + profiler_mode = default_profiler_mode if options.get("profiler_mode") is not None: profiler_mode = options["profiler_mode"] - else: - # TODO: deprecate this and just use the existing `profiler_mode` - experiments = options.get("_experiments", {}) - - profiler_mode = ( - experiments.get("continuous_profiling_mode") or default_profiler_mode - ) frequency = DEFAULT_SAMPLING_FREQUENCY @@ -152,17 +145,6 @@ def start_profiler(): _scheduler.manual_start() -def start_profile_session(): - # type: () -> None - - warnings.warn( - "The `start_profile_session` function is deprecated. Please use `start_profile` instead.", - DeprecationWarning, - stacklevel=2, - ) - start_profiler() - - def stop_profiler(): # type: () -> None if _scheduler is None: @@ -171,17 +153,6 @@ def stop_profiler(): _scheduler.manual_stop() -def stop_profile_session(): - # type: () -> None - - warnings.warn( - "The `stop_profile_session` function is deprecated. Please use `stop_profile` instead.", - DeprecationWarning, - stacklevel=2, - ) - stop_profiler() - - def teardown_continuous_profiler(): # type: () -> None stop_profiler() diff --git a/sentry_sdk/profiler/transaction_profiler.py b/sentry_sdk/profiler/transaction_profiler.py index 3743b7c905..095ce2f2f9 100644 --- a/sentry_sdk/profiler/transaction_profiler.py +++ b/sentry_sdk/profiler/transaction_profiler.py @@ -33,7 +33,6 @@ import threading import time import uuid -import warnings from abc import ABC, abstractmethod from collections import deque @@ -49,7 +48,6 @@ is_gevent, is_valid_sample_rate, logger, - nanosecond_time, set_in_app_in_frames, ) @@ -127,16 +125,6 @@ def has_profiling_enabled(options): if profiles_sample_rate is not None and profiles_sample_rate > 0: return True - profiles_sample_rate = options["_experiments"].get("profiles_sample_rate") - if profiles_sample_rate is not None: - logger.warning( - "_experiments['profiles_sample_rate'] is deprecated. " - "Please use the non-experimental profiles_sample_rate option " - "directly." - ) - if profiles_sample_rate > 0: - return True - return False @@ -159,16 +147,9 @@ def setup_profiler(options): else: default_profiler_mode = ThreadScheduler.mode + profiler_mode = default_profiler_mode if options.get("profiler_mode") is not None: profiler_mode = options["profiler_mode"] - else: - profiler_mode = options.get("_experiments", {}).get("profiler_mode") - if profiler_mode is not None: - logger.warning( - "_experiments['profiler_mode'] is deprecated. Please use the " - "non-experimental profiler_mode option directly." 
- ) - profiler_mode = profiler_mode or default_profiler_mode if ( profiler_mode == ThreadScheduler.mode @@ -210,7 +191,6 @@ def __init__( self, sampled, # type: Optional[bool] start_ns, # type: int - hub=None, # type: Optional[sentry_sdk.Hub] scheduler=None, # type: Optional[Scheduler] ): # type: (...) -> None @@ -241,16 +221,6 @@ def __init__( self.unique_samples = 0 - # Backwards compatibility with the old hub property - self._hub = None # type: Optional[sentry_sdk.Hub] - if hub is not None: - self._hub = hub - warnings.warn( - "The `hub` parameter is deprecated. Please do not use it.", - DeprecationWarning, - stacklevel=2, - ) - def update_active_thread_id(self): # type: () -> None self.active_thread_id = get_current_thread_meta()[0] @@ -296,12 +266,11 @@ def _set_initial_sampling_decision(self, sampling_context): options = client.options + sample_rate = None if callable(options.get("profiles_sampler")): sample_rate = options["profiles_sampler"](sampling_context) elif options["profiles_sample_rate"] is not None: sample_rate = options["profiles_sample_rate"] - else: - sample_rate = options["_experiments"].get("profiles_sample_rate") # The profiles_sample_rate option was not set, so profiling # was never enabled. @@ -342,7 +311,7 @@ def start(self): logger.debug("[Profiling] Starting profile") self.active = True if not self.start_ns: - self.start_ns = nanosecond_time() + self.start_ns = time.perf_counter_ns() self.scheduler.start_profiling(self) def stop(self): @@ -353,7 +322,7 @@ def stop(self): assert self.scheduler, "No scheduler specified" logger.debug("[Profiling] Stopping profile") self.active = False - self.stop_ns = nanosecond_time() + self.stop_ns = time.perf_counter_ns() def __enter__(self): # type: () -> Profile @@ -517,26 +486,6 @@ def valid(self): return True - @property - def hub(self): - # type: () -> Optional[sentry_sdk.Hub] - warnings.warn( - "The `hub` attribute is deprecated. Please do not access it.", - DeprecationWarning, - stacklevel=2, - ) - return self._hub - - @hub.setter - def hub(self, value): - # type: (Optional[sentry_sdk.Hub]) -> None - warnings.warn( - "The `hub` attribute is deprecated. Please do not set it.", - DeprecationWarning, - stacklevel=2, - ) - self._hub = value - class Scheduler(ABC): mode = "unknown" # type: ProfilerMode @@ -612,7 +561,7 @@ def _sample_stack(*args, **kwargs): # were started after this point. 
new_profiles = len(self.new_profiles) - now = nanosecond_time() + now = time.perf_counter_ns() try: sample = [ diff --git a/sentry_sdk/scope.py b/sentry_sdk/scope.py index f346569255..975ac6fe04 100644 --- a/sentry_sdk/scope.py +++ b/sentry_sdk/scope.py @@ -11,27 +11,23 @@ from sentry_sdk._types import AnnotatedValue from sentry_sdk.attachments import Attachment -from sentry_sdk.consts import DEFAULT_MAX_BREADCRUMBS, FALSE_VALUES, INSTRUMENTER -from sentry_sdk.feature_flags import FlagBuffer, DEFAULT_FLAG_CAPACITY -from sentry_sdk.profiler.continuous_profiler import ( - get_profiler_id, - try_autostart_continuous_profiler, - try_profile_lifecycle_trace_start, +from sentry_sdk.consts import ( + DEFAULT_MAX_BREADCRUMBS, + FALSE_VALUES, + BAGGAGE_HEADER_NAME, + SENTRY_TRACE_HEADER_NAME, ) +from sentry_sdk.feature_flags import FlagBuffer, DEFAULT_FLAG_CAPACITY from sentry_sdk.profiler.transaction_profiler import Profile from sentry_sdk.session import Session from sentry_sdk.tracing_utils import ( Baggage, has_tracing_enabled, - normalize_incoming_data, PropagationContext, ) from sentry_sdk.tracing import ( - BAGGAGE_HEADER_NAME, - SENTRY_TRACE_HEADER_NAME, NoOpSpan, Span, - Transaction, ) from sentry_sdk.utils import ( capture_internal_exception, @@ -44,7 +40,6 @@ logger, ) -import typing from typing import TYPE_CHECKING if TYPE_CHECKING: @@ -62,8 +57,7 @@ from typing import Tuple from typing import TypeVar from typing import Union - - from typing_extensions import Unpack + from typing import Self from sentry_sdk._types import ( Breadcrumb, @@ -74,12 +68,9 @@ ExcInfo, Hint, LogLevelStr, - SamplingContext, Type, ) - from sentry_sdk.tracing import TransactionKwargs - import sentry_sdk P = ParamSpec("P") @@ -115,28 +106,6 @@ class ScopeType(Enum): MERGED = "merged" -class _ScopeManager: - def __init__(self, hub=None): - # type: (Optional[Any]) -> None - self._old_scopes = [] # type: List[Scope] - - def __enter__(self): - # type: () -> Scope - isolation_scope = Scope.get_isolation_scope() - - self._old_scopes.append(isolation_scope) - - forked_scope = isolation_scope.fork() - _isolation_scope.set(forked_scope) - - return forked_scope - - def __exit__(self, exc_type, exc_value, tb): - # type: (Any, Any, Any) -> None - old_scope = self._old_scopes.pop() - _isolation_scope.set(old_scope) - - def add_global_event_processor(processor): # type: (EventProcessor) -> None global_event_processors.append(processor) @@ -225,12 +194,12 @@ def __init__(self, ty=None, client=None): self.generate_propagation_context(incoming_data=incoming_trace_information) def __copy__(self): - # type: () -> Scope + # type: () -> Self """ Returns a copy of this scope. This also creates a copy of all referenced data structures. """ - rv = object.__new__(self.__class__) # type: Scope + rv = object.__new__(self.__class__) # type: Self rv._type = self._type rv.client = self.client @@ -273,13 +242,21 @@ def get_current_scope(cls): Returns the current scope. """ - current_scope = _current_scope.get() + current_scope = cls._get_current_scope() if current_scope is None: current_scope = Scope(ty=ScopeType.CURRENT) _current_scope.set(current_scope) return current_scope + @classmethod + def _get_current_scope(cls): + # type: () -> Optional[Scope] + """ + Returns the current scope without creating a new one. Internal use only. + """ + return _current_scope.get() + @classmethod def set_current_scope(cls, new_current_scope): # type: (Scope) -> None @@ -299,13 +276,21 @@ def get_isolation_scope(cls): Returns the isolation scope. 
""" - isolation_scope = _isolation_scope.get() + isolation_scope = cls._get_isolation_scope() if isolation_scope is None: isolation_scope = Scope(ty=ScopeType.ISOLATION) _isolation_scope.set(isolation_scope) return isolation_scope + @classmethod + def _get_isolation_scope(cls): + # type: () -> Optional[Scope] + """ + Returns the isolation scope without creating a new one. Internal use only. + """ + return _isolation_scope.get() + @classmethod def set_isolation_scope(cls, new_isolation_scope): # type: (Scope) -> None @@ -349,7 +334,7 @@ def last_event_id(cls): return cls.get_isolation_scope()._last_event_id def _merge_scopes(self, additional_scope=None, additional_scope_kwargs=None): - # type: (Optional[Scope], Optional[Dict[str, Any]]) -> Scope + # type: (Optional[Scope], Optional[Dict[str, Any]]) -> Self """ Merges global, isolation and current scope into a new scope and adds the given additional scope or additional scope kwargs to it. @@ -357,16 +342,17 @@ def _merge_scopes(self, additional_scope=None, additional_scope_kwargs=None): if additional_scope and additional_scope_kwargs: raise TypeError("cannot provide scope and kwargs") - final_scope = copy(_global_scope) if _global_scope is not None else Scope() + final_scope = self.__class__() final_scope._type = ScopeType.MERGED - isolation_scope = _isolation_scope.get() - if isolation_scope is not None: - final_scope.update_from_scope(isolation_scope) + global_scope = self.get_global_scope() + final_scope.update_from_scope(global_scope) - current_scope = _current_scope.get() - if current_scope is not None: - final_scope.update_from_scope(current_scope) + isolation_scope = self.get_isolation_scope() + final_scope.update_from_scope(self.get_isolation_scope()) + + current_scope = self.get_current_scope() + final_scope.update_from_scope(current_scope) if self != current_scope and self != isolation_scope: final_scope.update_from_scope(self) @@ -392,7 +378,7 @@ def get_client(cls): This checks the current scope, the isolation scope and the global scope for a client. If no client is available a :py:class:`sentry_sdk.client.NonRecordingClient` is returned. """ - current_scope = _current_scope.get() + current_scope = cls.get_current_scope() try: client = current_scope.client except AttributeError: @@ -401,7 +387,7 @@ def get_client(cls): if client is not None and client.is_active(): return client - isolation_scope = _isolation_scope.get() + isolation_scope = cls.get_isolation_scope() try: client = isolation_scope.client except AttributeError: @@ -434,7 +420,7 @@ def set_client(self, client=None): self.client = client if client is not None else NonRecordingClient() def fork(self): - # type: () -> Scope + # type: () -> Self """ .. versionadded:: 2.0.0 @@ -496,19 +482,10 @@ def generate_propagation_context(self, incoming_data=None): def get_dynamic_sampling_context(self): # type: () -> Optional[Dict[str, str]] """ - Returns the Dynamic Sampling Context from the Propagation Context. - If not existing, creates a new one. + Returns the Dynamic Sampling Context from the baggage or populates one. 
""" - if self._propagation_context is None: - return None - baggage = self.get_baggage() - if baggage is not None: - self._propagation_context.dynamic_sampling_context = ( - baggage.dynamic_sampling_context() - ) - - return self._propagation_context.dynamic_sampling_context + return baggage.dynamic_sampling_context() if baggage else None def get_traceparent(self, *args, **kwargs): # type: (Any, Any) -> Optional[str] @@ -519,7 +496,11 @@ def get_traceparent(self, *args, **kwargs): client = self.get_client() # If we have an active span, return traceparent from there - if has_tracing_enabled(client.options) and self.span is not None: + if ( + has_tracing_enabled(client.options) + and self.span is not None + and self.span.is_valid + ): return self.span.to_traceparent() # If this scope has a propagation context, return traceparent from there @@ -538,22 +519,24 @@ def get_baggage(self, *args, **kwargs): """ Returns the Sentry "baggage" header containing trace information from the currently active span or the scopes Propagation Context. + If not existing, creates a new one. """ client = self.get_client() # If we have an active span, return baggage from there - if has_tracing_enabled(client.options) and self.span is not None: + if ( + has_tracing_enabled(client.options) + and self.span is not None + and self.span.is_valid + ): return self.span.to_baggage() # If this scope has a propagation context, return baggage from there + # populate a fresh one if it doesn't exist if self._propagation_context is not None: - dynamic_sampling_context = ( - self._propagation_context.dynamic_sampling_context - ) - if dynamic_sampling_context is None: - return Baggage.from_options(self) - else: - return Baggage(dynamic_sampling_context) + if self._propagation_context.baggage is None: + self._propagation_context.baggage = Baggage.from_options(self) + return self._propagation_context.baggage # Fall back to isolation scope's baggage. It always has one return self.get_isolation_scope().get_baggage() @@ -581,12 +564,6 @@ def trace_propagation_meta(self, *args, **kwargs): Return meta tags which should be injected into HTML templates to allow propagation of trace information. """ - span = kwargs.pop("span", None) - if span is not None: - logger.warning( - "The parameter `span` in trace_propagation_meta() is deprecated and will be removed in the future." - ) - meta = "" sentry_trace = self.get_traceparent() @@ -615,10 +592,9 @@ def iter_headers(self): if traceparent is not None: yield SENTRY_TRACE_HEADER_NAME, traceparent - dsc = self.get_dynamic_sampling_context() - if dsc is not None: - baggage = Baggage(dsc).serialize() - yield BAGGAGE_HEADER_NAME, baggage + baggage = self.get_baggage() + if baggage is not None: + yield BAGGAGE_HEADER_NAME, baggage.serialize() def iter_trace_propagation_headers(self, *args, **kwargs): # type: (Any, Any) -> Generator[Tuple[str, str], None, None] @@ -629,18 +605,11 @@ def iter_trace_propagation_headers(self, *args, **kwargs): If no span is given, the trace data is taken from the scope. """ client = self.get_client() - if not client.options.get("propagate_traces"): - warnings.warn( - "The `propagate_traces` parameter is deprecated. 
Please use `trace_propagation_targets` instead.", - DeprecationWarning, - stacklevel=2, - ) - return span = kwargs.pop("span", None) span = span or self.span - if has_tracing_enabled(client.options) and span is not None: + if has_tracing_enabled(client.options) and span is not None and span.is_valid: for header in span.iter_headers(): yield header else: @@ -706,23 +675,6 @@ def clear(self): self._last_event_id = None # type: Optional[str] self._flags = None # type: Optional[FlagBuffer] - @_attr_setter - def level(self, value): - # type: (LogLevelStr) -> None - """ - When set this overrides the level. - - .. deprecated:: 1.0.0 - Use :func:`set_level` instead. - - :param value: The level to set. - """ - logger.warning( - "Deprecated: use .set_level() instead. This will be removed in the future." - ) - - self._level = value - def set_level(self, value): # type: (LogLevelStr) -> None """ @@ -739,71 +691,36 @@ def fingerprint(self, value): self._fingerprint = value @property - def transaction(self): - # type: () -> Any - # would be type: () -> Optional[Transaction], see https://github.com/python/mypy/issues/3004 - """Return the transaction (root span) in the scope, if any.""" - - # there is no span/transaction on the scope + def root_span(self): + # type: () -> Optional[Span] + """Return the root span in the scope, if any.""" if self._span is None: return None - # there is an orphan span on the scope - if self._span.containing_transaction is None: - return None - - # there is either a transaction (which is its own containing - # transaction) or a non-orphan span on the scope - return self._span.containing_transaction - - @transaction.setter - def transaction(self, value): - # type: (Any) -> None - # would be type: (Optional[str]) -> None, see https://github.com/python/mypy/issues/3004 - """When set this forces a specific transaction name to be set. - - Deprecated: use set_transaction_name instead.""" - - # XXX: the docstring above is misleading. The implementation of - # apply_to_event prefers an existing value of event.transaction over - # anything set in the scope. - # XXX: note that with the introduction of the Scope.transaction getter, - # there is a semantic and type mismatch between getter and setter. The - # getter returns a Transaction, the setter sets a transaction name. - # Without breaking version compatibility, we could make the setter set a - # transaction name or transaction (self._span) depending on the type of - # the value argument. - - logger.warning( - "Assigning to scope.transaction directly is deprecated: use scope.set_transaction_name() instead." - ) - self._transaction = value - if self._span and self._span.containing_transaction: - self._span.containing_transaction.name = value + return self._span.root_span def set_transaction_name(self, name, source=None): # type: (str, Optional[str]) -> None """Set the transaction name and optionally the transaction source.""" self._transaction = name - if self._span and self._span.containing_transaction: - self._span.containing_transaction.name = name + if self._span and self._span.root_span: + self._span.root_span.name = name if source: - self._span.containing_transaction.source = source + self._span.root_span.source = source if source: self._transaction_info["source"] = source - @_attr_setter - def user(self, value): - # type: (Optional[Dict[str, Any]]) -> None - """When set a specific user is bound to the scope. 
Deprecated in favor of set_user.""" - warnings.warn( - "The `Scope.user` setter is deprecated in favor of `Scope.set_user()`.", - DeprecationWarning, - stacklevel=2, - ) - self.set_user(value) + @property + def transaction_name(self): + # type: () -> Optional[str] + return self._transaction + + @property + def transaction_source(self): + # type: () -> Optional[str] + return self._transaction_info.get("source") def set_user(self, value): # type: (Optional[Dict[str, Any]]) -> None @@ -816,21 +733,14 @@ def set_user(self, value): @property def span(self): # type: () -> Optional[Span] - """Get/set current tracing span or transaction.""" + """Get current tracing span.""" return self._span @span.setter def span(self, span): # type: (Optional[Span]) -> None + """Set current tracing span.""" self._span = span - # XXX: this differs from the implementation in JS, there Scope.setSpan - # does not set Scope._transactionName. - if isinstance(span, Transaction): - transaction = span - if transaction.name: - self._transaction = transaction.name - if transaction.source: - self._transaction_info["source"] = transaction.source @property def profile(self): @@ -990,195 +900,41 @@ def add_breadcrumb(self, crumb=None, hint=None, **kwargs): self._breadcrumbs.popleft() self._n_breadcrumbs_truncated += 1 - def start_transaction( - self, - transaction=None, - instrumenter=INSTRUMENTER.SENTRY, - custom_sampling_context=None, - **kwargs, - ): - # type: (Optional[Transaction], str, Optional[SamplingContext], Unpack[TransactionKwargs]) -> Union[Transaction, NoOpSpan] + def start_transaction(self, **kwargs): + # type: (Any) -> Union[NoOpSpan, Span] """ - Start and return a transaction. - - Start an existing transaction if given, otherwise create and start a new - transaction with kwargs. - - This is the entry point to manual tracing instrumentation. - - A tree structure can be built by adding child spans to the transaction, - and child spans to other spans. To start a new child span within the - transaction or any span, call the respective `.start_child()` method. - - Every child span must be finished before the transaction is finished, - otherwise the unfinished spans are discarded. - - When used as context managers, spans and transactions are automatically - finished at the end of the `with` block. If not using context managers, - call the `.finish()` method. - - When the transaction is finished, it will be sent to Sentry with all its - finished child spans. - - :param transaction: The transaction to start. If omitted, we create and - start a new transaction. - :param instrumenter: This parameter is meant for internal use only. It - will be removed in the next major version. - :param custom_sampling_context: The transaction's custom sampling context. - :param kwargs: Optional keyword arguments to be passed to the Transaction - constructor. See :py:class:`sentry_sdk.tracing.Transaction` for - available arguments. + .. deprecated:: 3.0.0 + This function is deprecated and will be removed in a future release. + Use :py:meth:`sentry_sdk.start_span` instead. """ - kwargs.setdefault("scope", self) - - client = self.get_client() - - configuration_instrumenter = client.options["instrumenter"] - - if instrumenter != configuration_instrumenter: - return NoOpSpan() - - try_autostart_continuous_profiler() - - custom_sampling_context = custom_sampling_context or {} - - # kwargs at this point has type TransactionKwargs, since we have removed - # the client and custom_sampling_context from it. 
- transaction_kwargs = kwargs # type: TransactionKwargs - - # if we haven't been given a transaction, make one - if transaction is None: - transaction = Transaction(**transaction_kwargs) - - # use traces_sample_rate, traces_sampler, and/or inheritance to make a - # sampling decision - sampling_context = { - "transaction_context": transaction.to_json(), - "parent_sampled": transaction.parent_sampled, - } - sampling_context.update(custom_sampling_context) - transaction._set_initial_sampling_decision(sampling_context=sampling_context) - - # update the sample rate in the dsc - if transaction.sample_rate is not None: - propagation_context = self.get_active_propagation_context() - if propagation_context: - dsc = propagation_context.dynamic_sampling_context - if dsc is not None: - dsc["sample_rate"] = str(transaction.sample_rate) - if transaction._baggage: - transaction._baggage.sentry_items["sample_rate"] = str( - transaction.sample_rate - ) - - if transaction.sampled: - profile = Profile( - transaction.sampled, transaction._start_timestamp_monotonic_ns - ) - profile._set_initial_sampling_decision(sampling_context=sampling_context) - - transaction._profile = profile - - transaction._continuous_profile = try_profile_lifecycle_trace_start() - - # Typically, the profiler is set when the transaction is created. But when - # using the auto lifecycle, the profiler isn't running when the first - # transaction is started. So make sure we update the profiler id on it. - if transaction._continuous_profile is not None: - transaction.set_profiler_id(get_profiler_id()) - - # we don't bother to keep spans if we already know we're not going to - # send the transaction - max_spans = (client.options["_experiments"].get("max_spans")) or 1000 - transaction.init_span_recorder(maxlen=max_spans) - - return transaction + warnings.warn( + "The `start_transaction` method is deprecated, please use `sentry_sdk.start_span` instead.", + DeprecationWarning, + stacklevel=2, + ) + return NoOpSpan(**kwargs) - def start_span(self, instrumenter=INSTRUMENTER.SENTRY, **kwargs): - # type: (str, Any) -> Span + def start_span(self, **kwargs): + # type: (Any) -> Union[NoOpSpan, Span] """ - Start a span whose parent is the currently active span or transaction, if any. + Start a span whose parent is the currently active span, if any. The return value is a :py:class:`sentry_sdk.tracing.Span` instance, typically used as a context manager to start and stop timing in a `with` block. - Only spans contained in a transaction are sent to Sentry. Most - integrations start a transaction at the appropriate time, for example - for every incoming HTTP request. Use - :py:meth:`sentry_sdk.start_transaction` to start a new transaction when - one is not already in progress. - For supported `**kwargs` see :py:class:`sentry_sdk.tracing.Span`. - - The instrumenter parameter is deprecated for user code, and it will - be removed in the next major version. Going forward, it should only - be used by the SDK itself. """ - if kwargs.get("description") is not None: - warnings.warn( - "The `description` parameter is deprecated.
Please use `name` instead.", - DeprecationWarning, - stacklevel=2, - ) - - with new_scope(): - kwargs.setdefault("scope", self) - - client = self.get_client() - - configuration_instrumenter = client.options["instrumenter"] - - if instrumenter != configuration_instrumenter: - return NoOpSpan() - - # get current span or transaction - span = self.span or self.get_isolation_scope().span - - if span is None: - # New spans get the `trace_id` from the scope - if "trace_id" not in kwargs: - propagation_context = self.get_active_propagation_context() - if propagation_context is not None: - kwargs["trace_id"] = propagation_context.trace_id + return NoOpSpan(**kwargs) - span = Span(**kwargs) - else: - # Children take `trace_id`` from the parent span. - span = span.start_child(**kwargs) - - return span - - def continue_trace( - self, environ_or_headers, op=None, name=None, source=None, origin="manual" - ): - # type: (Dict[str, Any], Optional[str], Optional[str], Optional[str], str) -> Transaction + @contextmanager + def continue_trace(self, environ_or_headers): + # type: (Dict[str, Any]) -> Generator[None, None, None] """ - Sets the propagation context from environment or headers and returns a transaction. + Sets the propagation context from environment or headers to continue an incoming trace. """ self.generate_propagation_context(environ_or_headers) - - # When we generate the propagation context, the sample_rand value is set - # if missing or invalid (we use the original value if it's valid). - # We want the transaction to use the same sample_rand value. Due to duplicated - # propagation logic in the transaction, we pass it in to avoid recomputing it - # in the transaction. - # TYPE SAFETY: self.generate_propagation_context() ensures that self._propagation_context - # is not None. 
- sample_rand = typing.cast( - PropagationContext, self._propagation_context - )._sample_rand() - - transaction = Transaction.continue_from_headers( - normalize_incoming_data(environ_or_headers), - _sample_rand=sample_rand, - op=op, - origin=origin, - name=name, - source=source, - ) - - return transaction + yield def capture_event(self, event, hint=None, scope=None, **scope_kwargs): # type: (Event, Optional[Hint], Optional[Scope], Any) -> Optional[str] @@ -1432,7 +1188,11 @@ def _apply_contexts_to_event(self, event, hint, options): # Add "trace" context if contexts.get("trace") is None: - if has_tracing_enabled(options) and self._span is not None: + if ( + has_tracing_enabled(options) + and self._span is not None + and self._span.is_valid + ): contexts["trace"] = self._span.get_trace_context() else: contexts["trace"] = self.get_trace_context() @@ -1482,8 +1242,8 @@ def run_event_processors(self, event, hint): if not is_check_in: # Get scopes without creating them to prevent infinite recursion - isolation_scope = _isolation_scope.get() - current_scope = _current_scope.get() + isolation_scope = self._get_isolation_scope() + current_scope = self._get_current_scope() event_processors = chain( global_event_processors, @@ -1493,7 +1253,7 @@ def run_event_processors(self, event, hint): ) for event_processor in event_processors: - new_event = event + new_event = event # type: Optional[Event] with capture_internal_exceptions(): new_event = event_processor(event, hint) if new_event is None: diff --git a/sentry_sdk/sessions.py b/sentry_sdk/sessions.py index eaeb915e7b..162023a54a 100644 --- a/sentry_sdk/sessions.py +++ b/sentry_sdk/sessions.py @@ -1,6 +1,5 @@ import os import time -import warnings from threading import Thread, Lock from contextlib import contextmanager @@ -18,75 +17,6 @@ from typing import Generator from typing import List from typing import Optional - from typing import Union - - -def is_auto_session_tracking_enabled(hub=None): - # type: (Optional[sentry_sdk.Hub]) -> Union[Any, bool, None] - """DEPRECATED: Utility function to find out if session tracking is enabled.""" - - # Internal callers should use private _is_auto_session_tracking_enabled, instead. - warnings.warn( - "This function is deprecated and will be removed in the next major release. " - "There is no public API replacement.", - DeprecationWarning, - stacklevel=2, - ) - - if hub is None: - hub = sentry_sdk.Hub.current - - should_track = hub.scope._force_auto_session_tracking - - if should_track is None: - client_options = hub.client.options if hub.client else {} - should_track = client_options.get("auto_session_tracking", False) - - return should_track - - -@contextmanager -def auto_session_tracking(hub=None, session_mode="application"): - # type: (Optional[sentry_sdk.Hub], str) -> Generator[None, None, None] - """DEPRECATED: Use track_session instead - Starts and stops a session automatically around a block. - """ - warnings.warn( - "This function is deprecated and will be removed in the next major release. 
" - "Use track_session instead.", - DeprecationWarning, - stacklevel=2, - ) - - if hub is None: - hub = sentry_sdk.Hub.current - with warnings.catch_warnings(): - warnings.simplefilter("ignore", DeprecationWarning) - should_track = is_auto_session_tracking_enabled(hub) - if should_track: - hub.start_session(session_mode=session_mode) - try: - yield - finally: - if should_track: - hub.end_session() - - -def is_auto_session_tracking_enabled_scope(scope): - # type: (sentry_sdk.Scope) -> bool - """ - DEPRECATED: Utility function to find out if session tracking is enabled. - """ - - warnings.warn( - "This function is deprecated and will be removed in the next major release. " - "There is no public API replacement.", - DeprecationWarning, - stacklevel=2, - ) - - # Internal callers should use private _is_auto_session_tracking_enabled, instead. - return _is_auto_session_tracking_enabled(scope) def _is_auto_session_tracking_enabled(scope): @@ -103,23 +33,6 @@ def _is_auto_session_tracking_enabled(scope): return should_track -@contextmanager -def auto_session_tracking_scope(scope, session_mode="application"): - # type: (sentry_sdk.Scope, str) -> Generator[None, None, None] - """DEPRECATED: This function is a deprecated alias for track_session. - Starts and stops a session automatically around a block. - """ - - warnings.warn( - "This function is a deprecated alias for track_session and will be removed in the next major release.", - DeprecationWarning, - stacklevel=2, - ) - - with track_session(scope, session_mode=session_mode): - yield - - @contextmanager def track_session(scope, session_mode="application"): # type: (sentry_sdk.Scope, str) -> Generator[None, None, None] diff --git a/sentry_sdk/tracing.py b/sentry_sdk/tracing.py index fc40221b9f..92ac4d7671 100644 --- a/sentry_sdk/tracing.py +++ b/sentry_sdk/tracing.py @@ -1,29 +1,59 @@ -from decimal import Decimal -import uuid +from datetime import datetime +import json import warnings -from datetime import datetime, timedelta, timezone -from enum import Enum + +from opentelemetry import trace as otel_trace, context +from opentelemetry.trace import ( + format_trace_id, + format_span_id, + Span as OtelSpan, + TraceState, + get_current_span, + INVALID_SPAN, +) +from opentelemetry.trace.status import Status, StatusCode +from opentelemetry.sdk.trace import ReadableSpan +from opentelemetry.version import __version__ as otel_version import sentry_sdk -from sentry_sdk.consts import INSTRUMENTER, SPANSTATUS, SPANDATA -from sentry_sdk.profiler.continuous_profiler import get_profiler_id +from sentry_sdk.consts import ( + DEFAULT_SPAN_NAME, + DEFAULT_SPAN_ORIGIN, + BAGGAGE_HEADER_NAME, + SENTRY_TRACE_HEADER_NAME, + SPANSTATUS, + SPANDATA, + TransactionSource, +) +from sentry_sdk.opentelemetry.consts import ( + TRACESTATE_SAMPLE_RATE_KEY, + SentrySpanAttribute, +) +from sentry_sdk.opentelemetry.utils import ( + baggage_from_trace_state, + convert_from_otel_timestamp, + convert_to_otel_timestamp, + get_trace_context, + get_trace_state, + get_sentry_meta, + serialize_trace_state, +) +from sentry_sdk.tracing_utils import get_span_status_from_http_code from sentry_sdk.utils import ( + _serialize_span_attribute, get_current_thread_meta, - is_valid_sample_rate, - logger, - nanosecond_time, + parse_version, should_be_treated_as_error, ) -from typing import TYPE_CHECKING +from typing import TYPE_CHECKING, cast if TYPE_CHECKING: - from collections.abc import Callable, Mapping, MutableMapping + from collections.abc import Callable from typing import Any from typing 
import Dict from typing import Iterator - from typing import List from typing import Optional from typing import overload from typing import ParamSpec @@ -31,346 +61,206 @@ from typing import Union from typing import TypeVar - from typing_extensions import TypedDict, Unpack - P = ParamSpec("P") R = TypeVar("R") - from sentry_sdk.profiler.continuous_profiler import ContinuousProfile - from sentry_sdk.profiler.transaction_profiler import Profile from sentry_sdk._types import ( - Event, - MeasurementUnit, SamplingContext, - MeasurementValue, ) - class SpanKwargs(TypedDict, total=False): - trace_id: str - """ - The trace ID of the root span. If this new span is to be the root span, - omit this parameter, and a new trace ID will be generated. - """ - - span_id: str - """The span ID of this span. If omitted, a new span ID will be generated.""" - - parent_span_id: str - """The span ID of the parent span, if applicable.""" - - same_process_as_parent: bool - """Whether this span is in the same process as the parent span.""" - - sampled: bool - """ - Whether the span should be sampled. Overrides the default sampling decision - for this span when provided. - """ - - op: str - """ - The span's operation. A list of recommended values is available here: - https://develop.sentry.dev/sdk/performance/span-operations/ - """ - - description: str - """A description of what operation is being performed within the span. This argument is DEPRECATED. Please use the `name` parameter, instead.""" - - hub: Optional["sentry_sdk.Hub"] - """The hub to use for this span. This argument is DEPRECATED. Please use the `scope` parameter, instead.""" + from sentry_sdk.tracing_utils import Baggage - status: str - """The span's status. Possible values are listed at https://develop.sentry.dev/sdk/event-payloads/span/""" +_FLAGS_CAPACITY = 10 +_OTEL_VERSION = parse_version(otel_version) - containing_transaction: Optional["Transaction"] - """The transaction that this span belongs to.""" +tracer = otel_trace.get_tracer(__name__) - start_timestamp: Optional[Union[datetime, float]] - """ - The timestamp when the span started. If omitted, the current time - will be used. - """ - scope: "sentry_sdk.Scope" - """The scope to use for this span. If not provided, we use the current scope.""" - - origin: str - """ - The origin of the span. - See https://develop.sentry.dev/sdk/performance/trace-origin/ - Default "manual". - """ - - name: str - """A string describing what operation is being performed within the span/transaction.""" - - class TransactionKwargs(SpanKwargs, total=False): - source: str - """ - A string describing the source of the transaction name. This will be used to determine the transaction's type. - See https://develop.sentry.dev/sdk/event-payloads/transaction/#transaction-annotations for more information. - Default "custom". - """ +class NoOpSpan: + def __init__(self, **kwargs): + # type: (Any) -> None + pass - parent_sampled: bool - """Whether the parent transaction was sampled. If True this transaction will be kept, if False it will be discarded.""" + def __repr__(self): + # type: () -> str + return "<%s>" % self.__class__.__name__ - baggage: "Baggage" - """The W3C baggage header value. 
(see https://www.w3.org/TR/baggage/)""" + @property + def root_span(self): + # type: () -> Optional[Span] + return None - ProfileContext = TypedDict( - "ProfileContext", - { - "profiler_id": str, - }, - ) + def start_child(self, **kwargs): + # type: (**Any) -> NoOpSpan + return NoOpSpan() -BAGGAGE_HEADER_NAME = "baggage" -SENTRY_TRACE_HEADER_NAME = "sentry-trace" + def to_traceparent(self): + # type: () -> str + return "" + def to_baggage(self): + # type: () -> Optional[Baggage] + return None -# Transaction source -# see https://develop.sentry.dev/sdk/event-payloads/transaction/#transaction-annotations -class TransactionSource(str, Enum): - COMPONENT = "component" - CUSTOM = "custom" - ROUTE = "route" - TASK = "task" - URL = "url" - VIEW = "view" + def get_baggage(self): + # type: () -> Optional[Baggage] + return None - def __str__(self): - # type: () -> str - return self.value + def iter_headers(self): + # type: () -> Iterator[Tuple[str, str]] + return iter(()) + def set_tag(self, key, value): + # type: (str, Any) -> None + pass -# These are typically high cardinality and the server hates them -LOW_QUALITY_TRANSACTION_SOURCES = [ - TransactionSource.URL, -] + def set_data(self, key, value): + # type: (str, Any) -> None + pass -SOURCE_FOR_STYLE = { - "endpoint": TransactionSource.COMPONENT, - "function_name": TransactionSource.COMPONENT, - "handler_name": TransactionSource.COMPONENT, - "method_and_path_pattern": TransactionSource.ROUTE, - "path": TransactionSource.URL, - "route_name": TransactionSource.COMPONENT, - "route_pattern": TransactionSource.ROUTE, - "uri_template": TransactionSource.ROUTE, - "url": TransactionSource.ROUTE, -} + def set_status(self, value): + # type: (str) -> None + pass + def set_http_status(self, http_status): + # type: (int) -> None + pass -def get_span_status_from_http_code(http_status_code): - # type: (int) -> str - """ - Returns the Sentry status corresponding to the given HTTP status code. + def is_success(self): + # type: () -> bool + return True - See: https://develop.sentry.dev/sdk/event-payloads/contexts/#trace-context - """ - if http_status_code < 400: - return SPANSTATUS.OK - - elif 400 <= http_status_code < 500: - if http_status_code == 403: - return SPANSTATUS.PERMISSION_DENIED - elif http_status_code == 404: - return SPANSTATUS.NOT_FOUND - elif http_status_code == 429: - return SPANSTATUS.RESOURCE_EXHAUSTED - elif http_status_code == 413: - return SPANSTATUS.FAILED_PRECONDITION - elif http_status_code == 401: - return SPANSTATUS.UNAUTHENTICATED - elif http_status_code == 409: - return SPANSTATUS.ALREADY_EXISTS - else: - return SPANSTATUS.INVALID_ARGUMENT - - elif 500 <= http_status_code < 600: - if http_status_code == 504: - return SPANSTATUS.DEADLINE_EXCEEDED - elif http_status_code == 501: - return SPANSTATUS.UNIMPLEMENTED - elif http_status_code == 503: - return SPANSTATUS.UNAVAILABLE - else: - return SPANSTATUS.INTERNAL_ERROR + def to_json(self): + # type: () -> Dict[str, Any] + return {} - return SPANSTATUS.UNKNOWN_ERROR + def get_trace_context(self): + # type: () -> Any + return {} + def get_profile_context(self): + # type: () -> Any + return {} -class _SpanRecorder: - """Limits the number of spans recorded in a transaction.""" + def finish( + self, + end_timestamp=None, # type: Optional[Union[float, datetime]] + ): + # type: (...) 
-> None + pass - __slots__ = ("maxlen", "spans", "dropped_spans") + def set_context(self, key, value): + # type: (str, dict[str, Any]) -> None + pass - def __init__(self, maxlen): + def init_span_recorder(self, maxlen): # type: (int) -> None - # FIXME: this is `maxlen - 1` only to preserve historical behavior - # enforced by tests. - # Either this should be changed to `maxlen` or the JS SDK implementation - # should be changed to match a consistent interpretation of what maxlen - # limits: either transaction+spans or only child spans. - self.maxlen = maxlen - 1 - self.spans = [] # type: List[Span] - self.dropped_spans = 0 # type: int - - def add(self, span): - # type: (Span) -> None - if len(self.spans) > self.maxlen: - span._span_recorder = None - self.dropped_spans += 1 - else: - self.spans.append(span) + pass + + def _set_initial_sampling_decision(self, sampling_context): + # type: (SamplingContext) -> None + pass class Span: - """A span holds timing information of a block of code. - Spans can have multiple child spans thus forming a span tree. - - :param trace_id: The trace ID of the root span. If this new span is to be the root span, - omit this parameter, and a new trace ID will be generated. - :param span_id: The span ID of this span. If omitted, a new span ID will be generated. - :param parent_span_id: The span ID of the parent span, if applicable. - :param same_process_as_parent: Whether this span is in the same process as the parent span. - :param sampled: Whether the span should be sampled. Overrides the default sampling decision - for this span when provided. - :param op: The span's operation. A list of recommended values is available here: - https://develop.sentry.dev/sdk/performance/span-operations/ - :param description: A description of what operation is being performed within the span. - - .. deprecated:: 2.15.0 - Please use the `name` parameter, instead. - :param name: A string describing what operation is being performed within the span. - :param hub: The hub to use for this span. - - .. deprecated:: 2.0.0 - Please use the `scope` parameter, instead. - :param status: The span's status. Possible values are listed at - https://develop.sentry.dev/sdk/event-payloads/span/ - :param containing_transaction: The transaction that this span belongs to. - :param start_timestamp: The timestamp when the span started. If omitted, the current time - will be used. - :param scope: The scope to use for this span. If not provided, we use the current scope. """ - - __slots__ = ( - "trace_id", - "span_id", - "parent_span_id", - "same_process_as_parent", - "sampled", - "op", - "description", - "_measurements", - "start_timestamp", - "_start_timestamp_monotonic_ns", - "status", - "timestamp", - "_tags", - "_data", - "_span_recorder", - "hub", - "_context_manager_state", - "_containing_transaction", - "_local_aggregator", - "scope", - "origin", - "name", - "_flags", - "_flags_capacity", - ) + OTel span wrapper providing compatibility with the old span interface. 
+ """ def __init__( self, - trace_id=None, # type: Optional[str] - span_id=None, # type: Optional[str] - parent_span_id=None, # type: Optional[str] - same_process_as_parent=True, # type: bool - sampled=None, # type: Optional[bool] + *, op=None, # type: Optional[str] description=None, # type: Optional[str] - hub=None, # type: Optional[sentry_sdk.Hub] # deprecated status=None, # type: Optional[str] - containing_transaction=None, # type: Optional[Transaction] + sampled=None, # type: Optional[bool] start_timestamp=None, # type: Optional[Union[datetime, float]] - scope=None, # type: Optional[sentry_sdk.Scope] - origin="manual", # type: str + origin=None, # type: Optional[str] name=None, # type: Optional[str] + source=TransactionSource.CUSTOM, # type: str + attributes=None, # type: Optional[dict[str, Any]] + only_if_parent=False, # type: bool + parent_span=None, # type: Optional[Span] + otel_span=None, # type: Optional[OtelSpan] + span=None, # type: Optional[Span] ): # type: (...) -> None - self.trace_id = trace_id or uuid.uuid4().hex - self.span_id = span_id or uuid.uuid4().hex[16:] - self.parent_span_id = parent_span_id - self.same_process_as_parent = same_process_as_parent - self.sampled = sampled - self.op = op - self.description = name or description - self.status = status - self.hub = hub # backwards compatibility - self.scope = scope - self.origin = origin - self._measurements = {} # type: Dict[str, MeasurementValue] - self._tags = {} # type: MutableMapping[str, str] - self._data = {} # type: Dict[str, Any] - self._containing_transaction = containing_transaction - self._flags = {} # type: Dict[str, bool] - self._flags_capacity = 10 - - if hub is not None: - warnings.warn( - "The `hub` parameter is deprecated. Please use `scope` instead.", - DeprecationWarning, - stacklevel=2, - ) + """ + If otel_span is passed explicitly, just acts as a proxy. - self.scope = self.scope or hub.scope + If span is passed explicitly, use it. The only purpose of this param + if backwards compatibility with start_transaction(transaction=...). - if start_timestamp is None: - start_timestamp = datetime.now(timezone.utc) - elif isinstance(start_timestamp, float): - start_timestamp = datetime.fromtimestamp(start_timestamp, timezone.utc) - self.start_timestamp = start_timestamp - try: - # profiling depends on this value and requires that - # it is measured in nanoseconds - self._start_timestamp_monotonic_ns = nanosecond_time() - except AttributeError: - pass + If only_if_parent is True, just return an INVALID_SPAN + and avoid instrumentation if there's no active parent span. 
+ """ + if otel_span is not None: + self._otel_span = otel_span + elif span is not None: + self._otel_span = span._otel_span + else: + skip_span = False + if only_if_parent and parent_span is None: + parent_span_context = get_current_span().get_span_context() + skip_span = ( + not parent_span_context.is_valid or parent_span_context.is_remote + ) - #: End timestamp of span - self.timestamp = None # type: Optional[datetime] + if skip_span: + self._otel_span = INVALID_SPAN + else: - self._span_recorder = None # type: Optional[_SpanRecorder] - self._local_aggregator = None # type: Optional[LocalAggregator] + if start_timestamp is not None: + # OTel timestamps have nanosecond precision + start_timestamp = convert_to_otel_timestamp(start_timestamp) + + span_name = name or description or op or DEFAULT_SPAN_NAME + + # Prepopulate some attrs so that they're accessible in traces_sampler + attributes = attributes or {} + if op is not None: + attributes[SentrySpanAttribute.OP] = op + if source is not None: + attributes[SentrySpanAttribute.SOURCE] = source + if description is not None: + attributes[SentrySpanAttribute.DESCRIPTION] = description + if sampled is not None: + attributes[SentrySpanAttribute.CUSTOM_SAMPLED] = sampled + + parent_context = None + if parent_span is not None: + parent_context = otel_trace.set_span_in_context( + parent_span._otel_span + ) + + self._otel_span = tracer.start_span( + span_name, + context=parent_context, + start_time=start_timestamp, + attributes=attributes, + ) - self.update_active_thread() - self.set_profiler_id(get_profiler_id()) + self.origin = origin or DEFAULT_SPAN_ORIGIN + self.description = description + self.name = span_name - # TODO this should really live on the Transaction class rather than the Span - # class - def init_span_recorder(self, maxlen): - # type: (int) -> None - if self._span_recorder is None: - self._span_recorder = _SpanRecorder(maxlen) + if status is not None: + self.set_status(status) - def _get_local_aggregator(self): - # type: (...) -> LocalAggregator - rv = self._local_aggregator - if rv is None: - rv = self._local_aggregator = LocalAggregator() - return rv + def __eq__(self, other): + # type: (object) -> bool + if not isinstance(other, Span): + return False + return self._otel_span == other._otel_span def __repr__(self): # type: () -> str return ( - "<%s(op=%r, description:%r, trace_id=%r, span_id=%r, parent_span_id=%r, sampled=%r, origin=%r)>" + "<%s(op=%r, name:%r, trace_id=%r, span_id=%r, parent_span_id=%r, sampled=%r, origin=%r)>" % ( self.__class__.__name__, self.op, - self.description, + self.name, self.trace_id, self.span_id, self.parent_span_id, @@ -381,194 +271,172 @@ def __repr__(self): def __enter__(self): # type: () -> Span - scope = self.scope or sentry_sdk.get_current_scope() - old_span = scope.span - scope.span = self - self._context_manager_state = (scope, old_span) + # XXX use_span? 
https://github.com/open-telemetry/opentelemetry-python/blob/3836da8543ce9751051e38a110c0468724042e62/opentelemetry-api/src/opentelemetry/trace/__init__.py#L547 + # + # create a Context object with parent set as current span + ctx = otel_trace.set_span_in_context(self._otel_span) + # set as the implicit current context + self._ctx_token = context.attach(ctx) + + # get the new scope that was forked on context.attach + self.scope = sentry_sdk.get_current_scope() + self.scope.span = self + return self def __exit__(self, ty, value, tb): # type: (Optional[Any], Optional[Any], Optional[Any]) -> None if value is not None and should_be_treated_as_error(ty, value): self.set_status(SPANSTATUS.INTERNAL_ERROR) + else: + status_unset = ( + hasattr(self._otel_span, "status") + and self._otel_span.status.status_code == StatusCode.UNSET + ) + if status_unset: + self.set_status(SPANSTATUS.OK) - scope, old_span = self._context_manager_state - del self._context_manager_state - self.finish(scope) - scope.span = old_span + self.finish() + context.detach(self._ctx_token) + del self._ctx_token @property - def containing_transaction(self): - # type: () -> Optional[Transaction] - """The ``Transaction`` that this span belongs to. - The ``Transaction`` is the root of the span tree, - so one could also think of this ``Transaction`` as the "root span".""" - - # this is a getter rather than a regular attribute so that transactions - # can return `self` here instead (as a way to prevent them circularly - # referencing themselves) - return self._containing_transaction - - def start_child(self, instrumenter=INSTRUMENTER.SENTRY, **kwargs): - # type: (str, **Any) -> Span - """ - Start a sub-span from the current span or transaction. - - Takes the same arguments as the initializer of :py:class:`Span`. The - trace id, sampling decision, transaction pointer, and span recorder are - inherited from the current span/transaction. - - The instrumenter parameter is deprecated for user code, and it will - be removed in the next major version. Going forward, it should only - be used by the SDK itself. - """ - if kwargs.get("description") is not None: - warnings.warn( - "The `description` parameter is deprecated. 
Please use `name` instead.", - DeprecationWarning, - stacklevel=2, - ) + def description(self): + # type: () -> Optional[str] + return self.get_attribute(SentrySpanAttribute.DESCRIPTION) - configuration_instrumenter = sentry_sdk.get_client().options["instrumenter"] - - if instrumenter != configuration_instrumenter: - return NoOpSpan() + @description.setter + def description(self, value): + # type: (Optional[str]) -> None + self.set_attribute(SentrySpanAttribute.DESCRIPTION, value) - kwargs.setdefault("sampled", self.sampled) + @property + def origin(self): + # type: () -> Optional[str] + return self.get_attribute(SentrySpanAttribute.ORIGIN) - child = Span( - trace_id=self.trace_id, - parent_span_id=self.span_id, - containing_transaction=self.containing_transaction, - **kwargs, - ) + @origin.setter + def origin(self, value): + # type: (Optional[str]) -> None + self.set_attribute(SentrySpanAttribute.ORIGIN, value) - span_recorder = ( - self.containing_transaction and self.containing_transaction._span_recorder + @property + def root_span(self): + # type: () -> Optional[Span] + root_otel_span = cast( + "Optional[OtelSpan]", get_sentry_meta(self._otel_span, "root_span") ) - if span_recorder: - span_recorder.add(child) + return Span(otel_span=root_otel_span) if root_otel_span else None - return child + @property + def is_root_span(self): + # type: () -> bool + return self.root_span == self - @classmethod - def continue_from_environ( - cls, - environ, # type: Mapping[str, str] - **kwargs, # type: Any - ): - # type: (...) -> Transaction - """ - Create a Transaction with the given params, then add in data pulled from - the ``sentry-trace`` and ``baggage`` headers from the environ (if any) - before returning the Transaction. + @property + def parent_span_id(self): + # type: () -> Optional[str] + if ( + not isinstance(self._otel_span, ReadableSpan) + or self._otel_span.parent is None + ): + return None + return format_span_id(self._otel_span.parent.span_id) - This is different from :py:meth:`~sentry_sdk.tracing.Span.continue_from_headers` - in that it assumes header names in the form ``HTTP_HEADER_NAME`` - - such as you would get from a WSGI/ASGI environ - - rather than the form ``header-name``. + @property + def trace_id(self): + # type: () -> str + return format_trace_id(self._otel_span.get_span_context().trace_id) - :param environ: The ASGI/WSGI environ to pull information from. - """ - if cls is Span: - logger.warning( - "Deprecated: use Transaction.continue_from_environ " - "instead of Span.continue_from_environ." - ) - return Transaction.continue_from_headers(EnvironHeaders(environ), **kwargs) + @property + def span_id(self): + # type: () -> str + return format_span_id(self._otel_span.get_span_context().span_id) - @classmethod - def continue_from_headers( - cls, - headers, # type: Mapping[str, str] - *, - _sample_rand=None, # type: Optional[str] - **kwargs, # type: Any - ): - # type: (...) -> Transaction - """ - Create a transaction with the given params (including any data pulled from - the ``sentry-trace`` and ``baggage`` headers). + @property + def is_valid(self): + # type: () -> bool + return self._otel_span.get_span_context().is_valid and isinstance( + self._otel_span, ReadableSpan + ) - :param headers: The dictionary with the HTTP headers to pull information from. - :param _sample_rand: If provided, we override the sample_rand value from the - incoming headers with this value. 
(internal use only) - """ - # TODO move this to the Transaction class - if cls is Span: - logger.warning( - "Deprecated: use Transaction.continue_from_headers " - "instead of Span.continue_from_headers." - ) + @property + def sampled(self): + # type: () -> Optional[bool] + return self._otel_span.get_span_context().trace_flags.sampled - # TODO-neel move away from this kwargs stuff, it's confusing and opaque - # make more explicit - baggage = Baggage.from_incoming_header( - headers.get(BAGGAGE_HEADER_NAME), _sample_rand=_sample_rand + @property + def sample_rate(self): + # type: () -> Optional[float] + sample_rate = self._otel_span.get_span_context().trace_state.get( + TRACESTATE_SAMPLE_RATE_KEY ) - kwargs.update({BAGGAGE_HEADER_NAME: baggage}) + return float(sample_rate) if sample_rate is not None else None - sentrytrace_kwargs = extract_sentrytrace_data( - headers.get(SENTRY_TRACE_HEADER_NAME) - ) + @property + def op(self): + # type: () -> Optional[str] + return self.get_attribute(SentrySpanAttribute.OP) - if sentrytrace_kwargs is not None: - kwargs.update(sentrytrace_kwargs) + @op.setter + def op(self, value): + # type: (Optional[str]) -> None + self.set_attribute(SentrySpanAttribute.OP, value) - # If there's an incoming sentry-trace but no incoming baggage header, - # for instance in traces coming from older SDKs, - # baggage will be empty and immutable and won't be populated as head SDK. - baggage.freeze() + @property + def name(self): + # type: () -> Optional[str] + return self.get_attribute(SentrySpanAttribute.NAME) - transaction = Transaction(**kwargs) - transaction.same_process_as_parent = False + @name.setter + def name(self, value): + # type: (Optional[str]) -> None + self.set_attribute(SentrySpanAttribute.NAME, value) - return transaction + @property + def source(self): + # type: () -> str + return ( + self.get_attribute(SentrySpanAttribute.SOURCE) or TransactionSource.CUSTOM + ) - def iter_headers(self): - # type: () -> Iterator[Tuple[str, str]] - """ - Creates a generator which returns the span's ``sentry-trace`` and ``baggage`` headers. - If the span's containing transaction doesn't yet have a ``baggage`` value, - this will cause one to be generated and stored. - """ - if not self.containing_transaction: - # Do not propagate headers if there is no containing transaction. Otherwise, this - # span ends up being the root span of a new trace, and since it does not get sent - # to Sentry, the trace will be missing a root transaction. The dynamic sampling - # context will also be missing, breaking dynamic sampling & traces. - return + @source.setter + def source(self, value): + # type: (str) -> None + self.set_attribute(SentrySpanAttribute.SOURCE, value) - yield SENTRY_TRACE_HEADER_NAME, self.to_traceparent() + @property + def start_timestamp(self): + # type: () -> Optional[datetime] + if not isinstance(self._otel_span, ReadableSpan): + return None - baggage = self.containing_transaction.get_baggage().serialize() - if baggage: - yield BAGGAGE_HEADER_NAME, baggage + start_time = self._otel_span.start_time + if start_time is None: + return None - @classmethod - def from_traceparent( - cls, - traceparent, # type: Optional[str] - **kwargs, # type: Any - ): - # type: (...) -> Optional[Transaction] - """ - DEPRECATED: Use :py:meth:`sentry_sdk.tracing.Span.continue_from_headers`. + return convert_from_otel_timestamp(start_time) - Create a ``Transaction`` with the given params, then add in data pulled from - the given ``sentry-trace`` header value before returning the ``Transaction``. 
- """ - logger.warning( - "Deprecated: Use Transaction.continue_from_headers(headers, **kwargs) " - "instead of from_traceparent(traceparent, **kwargs)" - ) + @property + def timestamp(self): + # type: () -> Optional[datetime] + if not isinstance(self._otel_span, ReadableSpan): + return None - if not traceparent: + end_time = self._otel_span.end_time + if end_time is None: return None - return cls.continue_from_headers( - {SENTRY_TRACE_HEADER_NAME: traceparent}, **kwargs - ) + return convert_from_otel_timestamp(end_time) + + def start_child(self, **kwargs): + # type: (**Any) -> Span + return Span(parent_span=self, **kwargs) + + def iter_headers(self): + # type: () -> Iterator[Tuple[str, str]] + yield SENTRY_TRACE_HEADER_NAME, self.to_traceparent() + yield BAGGAGE_HEADER_NAME, serialize_trace_state(self.trace_state) def to_traceparent(self): # type: () -> str @@ -585,748 +453,149 @@ def to_traceparent(self): return traceparent + @property + def trace_state(self): + # type: () -> TraceState + return get_trace_state(self._otel_span) + def to_baggage(self): - # type: () -> Optional[Baggage] - """Returns the :py:class:`~sentry_sdk.tracing_utils.Baggage` - associated with this ``Span``, if any. (Taken from the root of the span tree.) - """ - if self.containing_transaction: - return self.containing_transaction.get_baggage() - return None + # type: () -> Baggage + return self.get_baggage() + + def get_baggage(self): + # type: () -> Baggage + return baggage_from_trace_state(self.trace_state) def set_tag(self, key, value): # type: (str, Any) -> None - self._tags[key] = value + self.set_attribute(f"{SentrySpanAttribute.TAG}.{key}", value) def set_data(self, key, value): # type: (str, Any) -> None - self._data[key] = value - - def set_flag(self, flag, result): - # type: (str, bool) -> None - if len(self._flags) < self._flags_capacity: - self._flags[flag] = result - - def set_status(self, value): - # type: (str) -> None - self.status = value - - def set_measurement(self, name, value, unit=""): - # type: (str, float, MeasurementUnit) -> None - """ - .. deprecated:: 2.28.0 - This function is deprecated and will be removed in the next major release. - """ - warnings.warn( - "`set_measurement()` is deprecated and will be removed in the next major version. Please use `set_data()` instead.", + "`Span.set_data` is deprecated. 
Please use `Span.set_attribute` instead.", DeprecationWarning, stacklevel=2, ) - self._measurements[name] = {"value": value, "unit": unit} - - def set_thread(self, thread_id, thread_name): - # type: (Optional[int], Optional[str]) -> None - if thread_id is not None: - self.set_data(SPANDATA.THREAD_ID, str(thread_id)) + # TODO-neel-potel we cannot add dicts here + self.set_attribute(key, value) - if thread_name is not None: - self.set_data(SPANDATA.THREAD_NAME, thread_name) - - def set_profiler_id(self, profiler_id): - # type: (Optional[str]) -> None - if profiler_id is not None: - self.set_data(SPANDATA.PROFILER_ID, profiler_id) + def get_attribute(self, name): + # type: (str) -> Optional[Any] + if ( + not isinstance(self._otel_span, ReadableSpan) + or not self._otel_span.attributes + ): + return None + return self._otel_span.attributes.get(name) - def set_http_status(self, http_status): - # type: (int) -> None - self.set_tag( - "http.status_code", str(http_status) - ) # we keep this for backwards compatibility - self.set_data(SPANDATA.HTTP_STATUS_CODE, http_status) - self.set_status(get_span_status_from_http_code(http_status)) + def set_attribute(self, key, value): + # type: (str, Any) -> None + # otel doesn't support None as values, preferring to not set the key + # at all instead + if value is None: + return + serialized_value = _serialize_span_attribute(value) + if serialized_value is None: + return - def is_success(self): - # type: () -> bool - return self.status == "ok" + self._otel_span.set_attribute(key, serialized_value) - def finish(self, scope=None, end_timestamp=None): - # type: (Optional[sentry_sdk.Scope], Optional[Union[float, datetime]]) -> Optional[str] + @property + def status(self): + # type: () -> Optional[str] """ - Sets the end timestamp of the span. - - Additionally it also creates a breadcrumb from the span, - if the span represents a database or HTTP request. - - :param scope: The scope to use for this transaction. - If not provided, the current scope will be used. - :param end_timestamp: Optional timestamp that should - be used as timestamp instead of the current time. - - :return: Always ``None``. The type is ``Optional[str]`` to match - the return value of :py:meth:`sentry_sdk.tracing.Transaction.finish`. + Return the Sentry `SPANSTATUS` corresponding to the underlying OTel status. + Because differences in possible values in OTel `StatusCode` and + Sentry `SPANSTATUS` it can not be guaranteed that the status + set in `set_status()` will be the same as the one returned here. """ - if self.timestamp is not None: - # This span is already finished, ignore. 
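
# Illustration: a minimal sketch of the kind of coercion a helper like
# `_serialize_span_attribute` (used by `set_attribute` above) has to perform.
# OTel attribute values may only be primitives or homogeneous sequences of
# primitives, so everything else must be flattened to a string.
# `serialize_span_attribute` below is a hypothetical stand-in under those
# assumptions, not the SDK's actual helper.
import json

def serialize_span_attribute(value):
    # Primitives are already valid OTel attribute values.
    if isinstance(value, (bool, int, float, str)):
        return value
    # Anything else is flattened: JSON where possible, str() otherwise.
    try:
        return json.dumps(value)
    except TypeError:
        return str(value)

# serialize_span_attribute("ok")     -> "ok"
# serialize_span_attribute({"a": 1}) -> '{"a": 1}'
# (None never reaches this point: set_attribute returns early on None.)
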
+ if not isinstance(self._otel_span, ReadableSpan): return None - try: - if end_timestamp: - if isinstance(end_timestamp, float): - end_timestamp = datetime.fromtimestamp(end_timestamp, timezone.utc) - self.timestamp = end_timestamp - else: - elapsed = nanosecond_time() - self._start_timestamp_monotonic_ns - self.timestamp = self.start_timestamp + timedelta( - microseconds=elapsed / 1000 - ) - except AttributeError: - self.timestamp = datetime.now(timezone.utc) - - scope = scope or sentry_sdk.get_current_scope() - maybe_create_breadcrumbs_from_span(scope, self) - - return None - - def to_json(self): - # type: () -> Dict[str, Any] - """Returns a JSON-compatible representation of the span.""" - - rv = { - "trace_id": self.trace_id, - "span_id": self.span_id, - "parent_span_id": self.parent_span_id, - "same_process_as_parent": self.same_process_as_parent, - "op": self.op, - "description": self.description, - "start_timestamp": self.start_timestamp, - "timestamp": self.timestamp, - "origin": self.origin, - } # type: Dict[str, Any] - - if self.status: - self._tags["status"] = self.status - - if self._local_aggregator is not None: - metrics_summary = self._local_aggregator.to_json() - if metrics_summary: - rv["_metrics_summary"] = metrics_summary - - if len(self._measurements) > 0: - rv["measurements"] = self._measurements - - tags = self._tags - if tags: - rv["tags"] = tags - - data = {} - data.update(self._flags) - data.update(self._data) - if data: - rv["data"] = data - - return rv + if self._otel_span.status.status_code == StatusCode.UNSET: + return None + elif self._otel_span.status.status_code == StatusCode.OK: + return SPANSTATUS.OK + else: + return SPANSTATUS.UNKNOWN_ERROR - def get_trace_context(self): - # type: () -> Any - rv = { - "trace_id": self.trace_id, - "span_id": self.span_id, - "parent_span_id": self.parent_span_id, - "op": self.op, - "description": self.description, - "origin": self.origin, - } # type: Dict[str, Any] - if self.status: - rv["status"] = self.status - - if self.containing_transaction: - rv["dynamic_sampling_context"] = ( - self.containing_transaction.get_baggage().dynamic_sampling_context() - ) + def set_status(self, status): + # type: (str) -> None + if status == SPANSTATUS.OK: + otel_status = StatusCode.OK + otel_description = None + else: + otel_status = StatusCode.ERROR + otel_description = status - data = {} + if _OTEL_VERSION is None or _OTEL_VERSION >= (1, 12, 0): + self._otel_span.set_status(otel_status, otel_description) + else: + self._otel_span.set_status(Status(otel_status, otel_description)) - thread_id = self._data.get(SPANDATA.THREAD_ID) + def set_thread(self, thread_id, thread_name): + # type: (Optional[int], Optional[str]) -> None if thread_id is not None: - data["thread.id"] = thread_id - - thread_name = self._data.get(SPANDATA.THREAD_NAME) - if thread_name is not None: - data["thread.name"] = thread_name - - if data: - rv["data"] = data - - return rv + self.set_attribute(SPANDATA.THREAD_ID, str(thread_id)) - def get_profile_context(self): - # type: () -> Optional[ProfileContext] - profiler_id = self._data.get(SPANDATA.PROFILER_ID) - if profiler_id is None: - return None - - return { - "profiler_id": profiler_id, - } + if thread_name is not None: + self.set_attribute(SPANDATA.THREAD_NAME, thread_name) def update_active_thread(self): # type: () -> None thread_id, thread_name = get_current_thread_meta() self.set_thread(thread_id, thread_name) - -class Transaction(Span): - """The Transaction is the root element that holds all the spans - for 
Sentry performance instrumentation. - - :param name: Identifier of the transaction. - Will show up in the Sentry UI. - :param parent_sampled: Whether the parent transaction was sampled. - If True this transaction will be kept, if False it will be discarded. - :param baggage: The W3C baggage header value. - (see https://www.w3.org/TR/baggage/) - :param source: A string describing the source of the transaction name. - This will be used to determine the transaction's type. - See https://develop.sentry.dev/sdk/event-payloads/transaction/#transaction-annotations - for more information. Default "custom". - :param kwargs: Additional arguments to be passed to the Span constructor. - See :py:class:`sentry_sdk.tracing.Span` for available arguments. - """ - - __slots__ = ( - "name", - "source", - "parent_sampled", - # used to create baggage value for head SDKs in dynamic sampling - "sample_rate", - "_measurements", - "_contexts", - "_profile", - "_continuous_profile", - "_baggage", - "_sample_rand", - ) - - def __init__( # type: ignore[misc] - self, - name="", # type: str - parent_sampled=None, # type: Optional[bool] - baggage=None, # type: Optional[Baggage] - source=TransactionSource.CUSTOM, # type: str - **kwargs, # type: Unpack[SpanKwargs] - ): - # type: (...) -> None - - super().__init__(**kwargs) - - self.name = name - self.source = source - self.sample_rate = None # type: Optional[float] - self.parent_sampled = parent_sampled - self._measurements = {} # type: Dict[str, MeasurementValue] - self._contexts = {} # type: Dict[str, Any] - self._profile = None # type: Optional[Profile] - self._continuous_profile = None # type: Optional[ContinuousProfile] - self._baggage = baggage - - baggage_sample_rand = ( - None if self._baggage is None else self._baggage._sample_rand() - ) - if baggage_sample_rand is not None: - self._sample_rand = baggage_sample_rand - else: - self._sample_rand = _generate_sample_rand(self.trace_id) - - def __repr__(self): - # type: () -> str - return ( - "<%s(name=%r, op=%r, trace_id=%r, span_id=%r, parent_span_id=%r, sampled=%r, source=%r, origin=%r)>" - % ( - self.__class__.__name__, - self.name, - self.op, - self.trace_id, - self.span_id, - self.parent_span_id, - self.sampled, - self.source, - self.origin, - ) - ) - - def _possibly_started(self): - # type: () -> bool - """Returns whether the transaction might have been started. - - If this returns False, we know that the transaction was not started - with sentry_sdk.start_transaction, and therefore the transaction will - be discarded. - """ - - # We must explicitly check self.sampled is False since self.sampled can be None - return self._span_recorder is not None or self.sampled is False - - def __enter__(self): - # type: () -> Transaction - if not self._possibly_started(): - logger.debug( - "Transaction was entered without being started with sentry_sdk.start_transaction." - "The transaction will not be sent to Sentry. To fix, start the transaction by" - "passing it to sentry_sdk.start_transaction." - ) - - super().__enter__() - - if self._profile is not None: - self._profile.__enter__() - - return self - - def __exit__(self, ty, value, tb): - # type: (Optional[Any], Optional[Any], Optional[Any]) -> None - if self._profile is not None: - self._profile.__exit__(ty, value, tb) - - if self._continuous_profile is not None: - self._continuous_profile.stop() - - super().__exit__(ty, value, tb) - - @property - def containing_transaction(self): - # type: () -> Transaction - """The root element of the span tree. 
-        In the case of a transaction it is the transaction itself.
-        """
-
-        # Transactions (as spans) belong to themselves (as transactions). This
-        # is a getter rather than a regular attribute to avoid having a circular
-        # reference.
-        return self
-
-    def _get_scope_from_finish_args(
-        self,
-        scope_arg,  # type: Optional[Union[sentry_sdk.Scope, sentry_sdk.Hub]]
-        hub_arg,  # type: Optional[Union[sentry_sdk.Scope, sentry_sdk.Hub]]
-    ):
-        # type: (...) -> Optional[sentry_sdk.Scope]
-        """
-        Logic to get the scope from the arguments passed to finish. This
-        function exists for backwards compatibility with the old finish.
-
-        TODO: Remove this function in the next major version.
-        """
-        scope_or_hub = scope_arg
-        if hub_arg is not None:
-            warnings.warn(
-                "The `hub` parameter is deprecated. Please use the `scope` parameter, instead.",
-                DeprecationWarning,
-                stacklevel=3,
-            )
-
-            scope_or_hub = hub_arg
-
-        if isinstance(scope_or_hub, sentry_sdk.Hub):
-            warnings.warn(
-                "Passing a Hub to finish is deprecated. Please pass a Scope, instead.",
-                DeprecationWarning,
-                stacklevel=3,
-            )
-
-            return scope_or_hub.scope
-
-        return scope_or_hub
-
-    def finish(
-        self,
-        scope=None,  # type: Optional[sentry_sdk.Scope]
-        end_timestamp=None,  # type: Optional[Union[float, datetime]]
-        *,
-        hub=None,  # type: Optional[sentry_sdk.Hub]
-    ):
-        # type: (...) -> Optional[str]
-        """Finishes the transaction and sends it to Sentry.
-        All finished spans in the transaction will also be sent to Sentry.
-
-        :param scope: The Scope to use for this transaction.
-            If not provided, the current Scope will be used.
-        :param end_timestamp: Optional timestamp that should
-            be used as timestamp instead of the current time.
-        :param hub: The hub to use for this transaction.
-            This argument is DEPRECATED. Please use the `scope`
-            parameter, instead.
-
-        :return: The event ID if the transaction was sent to Sentry,
-            otherwise None.
-        """
-        if self.timestamp is not None:
-            # This transaction is already finished, ignore.
-            return None
-
-        # For backwards compatibility, we must handle the case where `scope`
-        # or `hub` could both either be a `Scope` or a `Hub`.
-        scope = self._get_scope_from_finish_args(
-            scope, hub
-        )  # type: Optional[sentry_sdk.Scope]
-
-        scope = scope or self.scope or sentry_sdk.get_current_scope()
-        client = sentry_sdk.get_client()
-
-        if not client.is_active():
-            # We have no active client and therefore nowhere to send this transaction.
-            return None
-
-        if self._span_recorder is None:
-            # Explicit check against False needed because self.sampled might be None
-            if self.sampled is False:
-                logger.debug("Discarding transaction because sampled = False")
-            else:
-                logger.debug(
-                    "Discarding transaction because it was not started with sentry_sdk.start_transaction"
-                )
-
-            # This is not entirely accurate because discards here are not
-            # exclusively based on sample rate but also traces sampler, but
-            # we handle this the same here.
-            if client.transport and has_tracing_enabled(client.options):
-                if client.monitor and client.monitor.downsample_factor > 0:
-                    reason = "backpressure"
-                else:
-                    reason = "sample_rate"
-
-                client.transport.record_lost_event(reason, data_category="transaction")
-
-                # Only one span (the transaction itself) is discarded, since we did not record any spans here.
-                client.transport.record_lost_event(reason, data_category="span")
-            return None
-
-        if not self.name:
-            logger.warning(
-                "Transaction has no name, falling back to `<unlabeled transaction>`."
-            )
-            self.name = "<unlabeled transaction>"
-
-        super().finish(scope, end_timestamp)
-
-        if not self.sampled:
-            # At this point a `sampled = None` should have already been resolved
-            # to a concrete decision.
-            if self.sampled is None:
-                logger.warning("Discarding transaction without sampling decision.")
-
-            return None
-
-        finished_spans = [
-            span.to_json()
-            for span in self._span_recorder.spans
-            if span.timestamp is not None
-        ]
-
-        len_diff = len(self._span_recorder.spans) - len(finished_spans)
-        dropped_spans = len_diff + self._span_recorder.dropped_spans
-
-        # we do this to break the circular reference of transaction -> span
-        # recorder -> span -> containing transaction (which is where we started)
-        # before either the spans or the transaction goes out of scope and has
-        # to be garbage collected
-        self._span_recorder = None
-
-        contexts = {}
-        contexts.update(self._contexts)
-        contexts.update({"trace": self.get_trace_context()})
-        profile_context = self.get_profile_context()
-        if profile_context is not None:
-            contexts.update({"profile": profile_context})
-
-        event = {
-            "type": "transaction",
-            "transaction": self.name,
-            "transaction_info": {"source": self.source},
-            "contexts": contexts,
-            "tags": self._tags,
-            "timestamp": self.timestamp,
-            "start_timestamp": self.start_timestamp,
-            "spans": finished_spans,
-        }  # type: Event
-
-        if dropped_spans > 0:
-            event["_dropped_spans"] = dropped_spans
-
-        if self._profile is not None and self._profile.valid():
-            event["profile"] = self._profile
-            self._profile = None
-
-        event["measurements"] = self._measurements
-
-        # This is here since `to_json` is not invoked. This really should
-        # be gone when we switch to onlyspans.
-        if self._local_aggregator is not None:
-            metrics_summary = self._local_aggregator.to_json()
-            if metrics_summary:
-                event["_metrics_summary"] = metrics_summary
-
-        return scope.capture_event(event)
-
-    def set_measurement(self, name, value, unit=""):
-        # type: (str, float, MeasurementUnit) -> None
-        """
-        .. deprecated:: 2.28.0
-            This function is deprecated and will be removed in the next major release.
-        """
-
-        warnings.warn(
-            "`set_measurement()` is deprecated and will be removed in the next major version. Please use `set_data()` instead.",
-            DeprecationWarning,
-            stacklevel=2,
-        )
-        self._measurements[name] = {"value": value, "unit": unit}
-
-    def set_context(self, key, value):
-        # type: (str, dict[str, Any]) -> None
-        """Sets a context. Transactions can have multiple contexts
-        and they should follow the format described in the "Contexts Interface"
-        documentation.
-
-        :param key: The name of the context.
-        :param value: The information about the context.
-        """
-        self._contexts[key] = value
-
-    def set_http_status(self, http_status):
-        # type: (int) -> None
-        """Sets the status of the Transaction according to the given HTTP status.
- - :param http_status: The HTTP status code.""" - super().set_http_status(http_status) - self.set_context("response", {"status_code": http_status}) - - def to_json(self): - # type: () -> Dict[str, Any] - """Returns a JSON-compatible representation of the transaction.""" - rv = super().to_json() - - rv["name"] = self.name - rv["source"] = self.source - rv["sampled"] = self.sampled - - return rv - - def get_trace_context(self): - # type: () -> Any - trace_context = super().get_trace_context() - - if self._data: - trace_context["data"] = self._data - - return trace_context - - def get_baggage(self): - # type: () -> Baggage - """Returns the :py:class:`~sentry_sdk.tracing_utils.Baggage` - associated with the Transaction. - - The first time a new baggage with Sentry items is made, - it will be frozen.""" - if not self._baggage or self._baggage.mutable: - self._baggage = Baggage.populate_from_transaction(self) - - return self._baggage - - def _set_initial_sampling_decision(self, sampling_context): - # type: (SamplingContext) -> None - """ - Sets the transaction's sampling decision, according to the following - precedence rules: - - 1. If a sampling decision is passed to `start_transaction` - (`start_transaction(name: "my transaction", sampled: True)`), that - decision will be used, regardless of anything else - - 2. If `traces_sampler` is defined, its decision will be used. It can - choose to keep or ignore any parent sampling decision, or use the - sampling context data to make its own decision or to choose a sample - rate for the transaction. - - 3. If `traces_sampler` is not defined, but there's a parent sampling - decision, the parent sampling decision will be used. - - 4. If `traces_sampler` is not defined and there's no parent sampling - decision, `traces_sample_rate` will be used. - """ - client = sentry_sdk.get_client() - - transaction_description = "{op}transaction <{name}>".format( - op=("<" + self.op + "> " if self.op else ""), name=self.name - ) - - # nothing to do if tracing is disabled - if not has_tracing_enabled(client.options): - self.sampled = False - return - - # if the user has forced a sampling decision by passing a `sampled` - # value when starting the transaction, go with that - if self.sampled is not None: - self.sample_rate = float(self.sampled) - return - - # we would have bailed already if neither `traces_sampler` nor - # `traces_sample_rate` were defined, so one of these should work; prefer - # the hook if so - sample_rate = ( - client.options["traces_sampler"](sampling_context) - if callable(client.options.get("traces_sampler")) - else ( - # default inheritance behavior - sampling_context["parent_sampled"] - if sampling_context["parent_sampled"] is not None - else client.options["traces_sample_rate"] - ) - ) - - # Since this is coming from the user (or from a function provided by the - # user), who knows what we might get. (The only valid values are - # booleans or numbers between 0 and 1.) 
- if not is_valid_sample_rate(sample_rate, source="Tracing"): - logger.warning( - "[Tracing] Discarding {transaction_description} because of invalid sample rate.".format( - transaction_description=transaction_description, - ) - ) - self.sampled = False - return - - self.sample_rate = float(sample_rate) - - if client.monitor: - self.sample_rate /= 2**client.monitor.downsample_factor - - # if the function returned 0 (or false), or if `traces_sample_rate` is - # 0, it's a sign the transaction should be dropped - if not self.sample_rate: - logger.debug( - "[Tracing] Discarding {transaction_description} because {reason}".format( - transaction_description=transaction_description, - reason=( - "traces_sampler returned 0 or False" - if callable(client.options.get("traces_sampler")) - else "traces_sample_rate is set to 0" - ), - ) - ) - self.sampled = False - return - - # Now we roll the dice. - self.sampled = self._sample_rand < Decimal.from_float(self.sample_rate) - - if self.sampled: - logger.debug( - "[Tracing] Starting {transaction_description}".format( - transaction_description=transaction_description, - ) - ) - else: - logger.debug( - "[Tracing] Discarding {transaction_description} because it's not included in the random sample (sampling rate = {sample_rate})".format( - transaction_description=transaction_description, - sample_rate=self.sample_rate, - ) - ) - - -class NoOpSpan(Span): - def __repr__(self): - # type: () -> str - return "<%s>" % self.__class__.__name__ - - @property - def containing_transaction(self): - # type: () -> Optional[Transaction] - return None - - def start_child(self, instrumenter=INSTRUMENTER.SENTRY, **kwargs): - # type: (str, **Any) -> NoOpSpan - return NoOpSpan() - - def to_traceparent(self): - # type: () -> str - return "" - - def to_baggage(self): - # type: () -> Optional[Baggage] - return None - - def get_baggage(self): - # type: () -> Optional[Baggage] - return None - - def iter_headers(self): - # type: () -> Iterator[Tuple[str, str]] - return iter(()) - - def set_tag(self, key, value): - # type: (str, Any) -> None - pass - - def set_data(self, key, value): - # type: (str, Any) -> None - pass - - def set_status(self, value): - # type: (str) -> None - pass - def set_http_status(self, http_status): # type: (int) -> None - pass + self.set_attribute(SPANDATA.HTTP_STATUS_CODE, http_status) + self.set_status(get_span_status_from_http_code(http_status)) def is_success(self): # type: () -> bool - return True + return self.status == SPANSTATUS.OK + + def finish(self, end_timestamp=None): + # type: (Optional[Union[float, datetime]]) -> None + if end_timestamp is not None: + self._otel_span.end(convert_to_otel_timestamp(end_timestamp)) + else: + self._otel_span.end() def to_json(self): - # type: () -> Dict[str, Any] - return {} + # type: () -> dict[str, Any] + """ + Only meant for testing. Not used internally anymore. + """ + if not isinstance(self._otel_span, ReadableSpan): + return {} + return json.loads(self._otel_span.to_json()) def get_trace_context(self): - # type: () -> Any - return {} + # type: () -> dict[str, Any] + if not isinstance(self._otel_span, ReadableSpan): + return {} - def get_profile_context(self): - # type: () -> Any - return {} + return get_trace_context(self._otel_span) - def finish( - self, - scope=None, # type: Optional[sentry_sdk.Scope] - end_timestamp=None, # type: Optional[Union[float, datetime]] - *, - hub=None, # type: Optional[sentry_sdk.Hub] - ): - # type: (...) -> Optional[str] - """ - The `hub` parameter is deprecated. 
Please use the `scope` parameter, instead. - """ - pass + def set_context(self, key, value): + # type: (str, Any) -> None + # TODO-neel-potel we cannot add dicts here - def set_measurement(self, name, value, unit=""): - # type: (str, float, MeasurementUnit) -> None - pass + self.set_attribute(f"{SentrySpanAttribute.CONTEXT}.{key}", value) - def set_context(self, key, value): - # type: (str, dict[str, Any]) -> None - pass + def set_flag(self, flag, value): + # type: (str, bool) -> None + flag_count = self.get_attribute("_flag.count") or 0 + if flag_count < _FLAGS_CAPACITY: + self.set_attribute(f"flag.evaluation.{flag}", value) + self.set_attribute("_flag.count", flag_count + 1) - def init_span_recorder(self, maxlen): - # type: (int) -> None - pass - def _set_initial_sampling_decision(self, sampling_context): - # type: (SamplingContext) -> None - pass +# TODO-neel-potel add deprecation +Transaction = Span if TYPE_CHECKING: @@ -1369,20 +638,3 @@ async def my_async_function(): return start_child_span_decorator(func) else: return start_child_span_decorator - - -# Circular imports - -from sentry_sdk.tracing_utils import ( - Baggage, - EnvironHeaders, - extract_sentrytrace_data, - _generate_sample_rand, - has_tracing_enabled, - maybe_create_breadcrumbs_from_span, -) - -with warnings.catch_warnings(): - # The code in this file which uses `LocalAggregator` is only called from the deprecated `metrics` module. - warnings.simplefilter("ignore", DeprecationWarning) - from sentry_sdk.metrics import LocalAggregator diff --git a/sentry_sdk/tracing_utils.py b/sentry_sdk/tracing_utils.py index 552f4fd59a..a323b84199 100644 --- a/sentry_sdk/tracing_utils.py +++ b/sentry_sdk/tracing_utils.py @@ -1,18 +1,25 @@ import contextlib +import decimal import inspect import os import re import sys +import uuid from collections.abc import Mapping -from datetime import timedelta +from datetime import datetime, timedelta, timezone from decimal import ROUND_DOWN, Decimal, DefaultContext, localcontext from functools import wraps from random import Random from urllib.parse import quote, unquote -import uuid import sentry_sdk -from sentry_sdk.consts import OP, SPANDATA +from sentry_sdk.consts import ( + OP, + SPANDATA, + SPANSTATUS, + BAGGAGE_HEADER_NAME, + SENTRY_TRACE_HEADER_NAME, +) from sentry_sdk.utils import ( capture_internal_exceptions, filename_for_module, @@ -21,7 +28,6 @@ match_regex_list, qualname_from_function, to_string, - try_convert, is_sentry_url, _is_external_source, _is_in_project_root, @@ -36,7 +42,6 @@ from typing import Generator from typing import Optional from typing import Union - from types import FrameType @@ -96,17 +101,14 @@ def has_tracing_enabled(options): # type: (Optional[Dict[str, Any]]) -> bool """ Returns True if either traces_sample_rate or traces_sampler is - defined and enable_tracing is set and not false. + defined. """ if options is None: return False return bool( - options.get("enable_tracing") is not False - and ( - options.get("traces_sample_rate") is not None - or options.get("traces_sampler") is not None - ) + options.get("traces_sample_rate") is not None + or options.get("traces_sampler") is not None ) @@ -118,7 +120,7 @@ def record_sql_queries( paramstyle, # type: Optional[str] executemany, # type: bool record_cursor_repr=False, # type: bool - span_origin="manual", # type: str + span_origin=None, # type: Optional[str] ): # type: (...) 
-> Generator[sentry_sdk.tracing.Span, None, None] @@ -152,44 +154,13 @@ def record_sql_queries( op=OP.DB, name=query, origin=span_origin, + only_if_parent=True, ) as span: for k, v in data.items(): - span.set_data(k, v) + span.set_attribute(k, v) yield span -def maybe_create_breadcrumbs_from_span(scope, span): - # type: (sentry_sdk.Scope, sentry_sdk.tracing.Span) -> None - if span.op == OP.DB_REDIS: - scope.add_breadcrumb( - message=span.description, type="redis", category="redis", data=span._tags - ) - - elif span.op == OP.HTTP_CLIENT: - level = None - status_code = span._data.get(SPANDATA.HTTP_STATUS_CODE) - if status_code: - if 500 <= status_code <= 599: - level = "error" - elif 400 <= status_code <= 499: - level = "warning" - - if level: - scope.add_breadcrumb( - type="http", category="httplib", data=span._data, level=level - ) - else: - scope.add_breadcrumb(type="http", category="httplib", data=span._data) - - elif span.op == "subprocess": - scope.add_breadcrumb( - type="subprocess", - category="subprocess", - message=span.description, - data=span._data, - ) - - def _get_frame_module_abs_path(frame): # type: (FrameType) -> Optional[str] try: @@ -227,14 +198,17 @@ def add_query_source(span): if not client.is_active(): return - if span.timestamp is None or span.start_timestamp is None: + if span.start_timestamp is None: return should_add_query_source = client.options.get("enable_db_query_source", True) if not should_add_query_source: return - duration = span.timestamp - span.start_timestamp + # We assume here that the query is just ending now. We can't use + # the actual end timestamp of the span because in OTel the span + # can't be finished in order to set any attributes on it. + duration = datetime.now(tz=timezone.utc) - span.start_timestamp threshold = client.options.get("db_query_source_threshold_ms", 0) slow_query = duration / timedelta(milliseconds=1) > threshold @@ -281,14 +255,14 @@ def add_query_source(span): except Exception: lineno = None if lineno is not None: - span.set_data(SPANDATA.CODE_LINENO, frame.f_lineno) + span.set_attribute(SPANDATA.CODE_LINENO, frame.f_lineno) try: namespace = frame.f_globals.get("__name__") except Exception: namespace = None if namespace is not None: - span.set_data(SPANDATA.CODE_NAMESPACE, namespace) + span.set_attribute(SPANDATA.CODE_NAMESPACE, namespace) filepath = _get_frame_module_abs_path(frame) if filepath is not None: @@ -298,7 +272,7 @@ def add_query_source(span): in_app_path = filepath.replace(project_root, "").lstrip(os.sep) else: in_app_path = filepath - span.set_data(SPANDATA.CODE_FILEPATH, in_app_path) + span.set_attribute(SPANDATA.CODE_FILEPATH, in_app_path) try: code_function = frame.f_code.co_name @@ -306,7 +280,7 @@ def add_query_source(span): code_function = None if code_function is not None: - span.set_data(SPANDATA.CODE_FUNCTION, frame.f_code.co_name) + span.set_attribute(SPANDATA.CODE_FUNCTION, frame.f_code.co_name) def extract_sentrytrace_data(header): @@ -371,7 +345,7 @@ class PropagationContext: "_span_id", "parent_span_id", "parent_sampled", - "dynamic_sampling_context", + "baggage", ) def __init__( @@ -380,7 +354,7 @@ def __init__( span_id=None, # type: Optional[str] parent_span_id=None, # type: Optional[str] parent_sampled=None, # type: Optional[bool] - dynamic_sampling_context=None, # type: Optional[Dict[str, str]] + baggage=None, # type: Optional[Baggage] ): # type: (...) 
-> None
         self._trace_id = trace_id
@@ -398,8 +372,13 @@ def __init__(
            Important when the parent span originated in an upstream service,
            because we want to sample the whole trace, or nothing from the trace."""

-        self.dynamic_sampling_context = dynamic_sampling_context
-        """Data that is used for dynamic sampling decisions."""
+        self.baggage = baggage
+        """Baggage object used for dynamic sampling decisions."""
+
+    @property
+    def dynamic_sampling_context(self):
+        # type: () -> Optional[Dict[str, str]]
+        return self.baggage.dynamic_sampling_context() if self.baggage else None

     @classmethod
     def from_incoming_data(cls, incoming_data):
@@ -410,9 +389,7 @@ def from_incoming_data(cls, incoming_data):
         baggage_header = normalized_data.get(BAGGAGE_HEADER_NAME)
         if baggage_header:
             propagation_context = PropagationContext()
-            propagation_context.dynamic_sampling_context = Baggage.from_incoming_header(
-                baggage_header
-            ).dynamic_sampling_context()
+            propagation_context.baggage = Baggage.from_incoming_header(baggage_header)

         sentry_trace_header = normalized_data.get(SENTRY_TRACE_HEADER_NAME)
         if sentry_trace_header:
@@ -432,7 +409,6 @@ def trace_id(self):
         # type: () -> str
         """The trace id of the Sentry trace."""
         if not self._trace_id:
-            # New trace, don't fill in sample_rand
             self._trace_id = uuid.uuid4().hex

         return self._trace_id
@@ -467,22 +443,12 @@ def update(self, other_dict):
         except AttributeError:
             pass

-    def __repr__(self):
-        # type: (...) -> str
-        return "<PropagationContext _trace_id={} _span_id={} parent_span_id={} parent_sampled={} dynamic_sampling_context={}>".format(
-            self._trace_id,
-            self._span_id,
-            self.parent_span_id,
-            self.parent_sampled,
-            self.dynamic_sampling_context,
-        )
-
     def _fill_sample_rand(self):
         # type: () -> None
         """
-        Ensure that there is a valid sample_rand value in the dynamic_sampling_context.
+        Ensure that there is a valid sample_rand value in the baggage.

-        If there is a valid sample_rand value in the dynamic_sampling_context, we keep it.
+        If there is a valid sample_rand value in the baggage, we keep it.
         Otherwise, we generate a sample_rand value according to the following:

         - If we have a parent_sampled value and a sample_rate in the DSC, we compute
@@ -497,21 +463,33 @@ def _fill_sample_rand(self):

         This function does nothing if there is no dynamic_sampling_context.
         """
-        if self.dynamic_sampling_context is None:
+        if self.dynamic_sampling_context is None or self.baggage is None:
             return

-        sample_rand = try_convert(
-            Decimal, self.dynamic_sampling_context.get("sample_rand")
-        )
+        sentry_baggage = self.baggage.sentry_items
+
+        sample_rand = None
+        if sentry_baggage.get("sample_rand"):
+            try:
+                sample_rand = Decimal(sentry_baggage["sample_rand"])
+            except Exception:
+                logger.debug(
+                    f"Failed to convert incoming sample_rand to Decimal: {sentry_baggage['sample_rand']}"
+                )
+
         if sample_rand is not None and 0 <= sample_rand < 1:
             # sample_rand is present and valid, so don't overwrite it
             return

-        # Get the sample rate and compute the transformation that will map the random value
-        # to the desired range: [0, 1), [0, sample_rate), or [sample_rate, 1).
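
# Illustration of the mapping described in the comment above, with `scale` as
# a hypothetical helper (the SDK computes the bounds in `_sample_rand_range`):
# a uniform raw value is squeezed into the sub-interval that is consistent
# with the parent's sampling decision.
def scale(raw, parent_sampled, sample_rate):
    if parent_sampled is None or sample_rate is None:
        lower, upper = 0.0, 1.0          # no constraint
    elif parent_sampled:
        lower, upper = 0.0, sample_rate  # must look sampled
    else:
        lower, upper = sample_rate, 1.0  # must look unsampled
    return lower + raw * (upper - lower)

# scale(0.5, True, 0.2)  -> 0.1 (inside [0, 0.2): consistent with "kept")
# scale(0.5, False, 0.2) -> 0.6 (inside [0.2, 1): consistent with "dropped")
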
-        sample_rate = try_convert(
-            float, self.dynamic_sampling_context.get("sample_rate")
-        )
+        sample_rate = None
+        if sentry_baggage.get("sample_rate"):
+            try:
+                sample_rate = float(sentry_baggage["sample_rate"])
+            except Exception:
+                logger.debug(
+                    f"Failed to convert incoming sample_rate to float: {sentry_baggage['sample_rate']}"
+                )
+
         lower, upper = _sample_rand_range(self.parent_sampled, sample_rate)

         try:
@@ -527,17 +505,26 @@ def _fill_sample_rand(self):
             )
             return

-        self.dynamic_sampling_context["sample_rand"] = (
-            f"{sample_rand:.6f}"  # noqa: E231
-        )
+        self.baggage.sentry_items["sample_rand"] = f"{sample_rand:.6f}"  # noqa: E231

     def _sample_rand(self):
         # type: () -> Optional[str]
-        """Convenience method to get the sample_rand value from the dynamic_sampling_context."""
-        if self.dynamic_sampling_context is None:
+        """Convenience method to get the sample_rand value from the baggage."""
+        if self.baggage is None:
             return None

-        return self.dynamic_sampling_context.get("sample_rand")
+        return self.baggage.sentry_items.get("sample_rand")
+
+    def __repr__(self):
+        # type: (...) -> str
+        return "<PropagationContext _trace_id={} _span_id={} parent_span_id={} parent_sampled={} baggage={} dynamic_sampling_context={}>".format(
+            self._trace_id,
+            self._span_id,
+            self.parent_span_id,
+            self.parent_sampled,
+            self.baggage,
+            self.dynamic_sampling_context,
+        )


 class Baggage:
@@ -568,8 +555,6 @@ def __init__(
     def from_incoming_header(
         cls,
         header,  # type: Optional[str]
-        *,
-        _sample_rand=None,  # type: Optional[str]
     ):
         # type: (...) -> Baggage
         """
@@ -594,10 +579,6 @@ def from_incoming_header(
         else:
             third_party_items += ("," if third_party_items else "") + item

-        if _sample_rand is not None:
-            sentry_items["sample_rand"] = str(_sample_rand)
-            mutable = False
-
         return Baggage(sentry_items, third_party_items, mutable)

     @classmethod
@@ -633,53 +614,6 @@ def from_options(cls, scope):

         return Baggage(sentry_items, third_party_items, mutable)

-    @classmethod
-    def populate_from_transaction(cls, transaction):
-        # type: (sentry_sdk.tracing.Transaction) -> Baggage
-        """
-        Populate fresh baggage entry with sentry_items and make it immutable
-        if this is the head SDK which originates traces.
-        """
-        client = sentry_sdk.get_client()
-        sentry_items = {}  # type: Dict[str, str]
-
-        if not client.is_active():
-            return Baggage(sentry_items)
-
-        options = client.options or {}
-
-        sentry_items["trace_id"] = transaction.trace_id
-        sentry_items["sample_rand"] = str(transaction._sample_rand)
-
-        if options.get("environment"):
-            sentry_items["environment"] = options["environment"]
-
-        if options.get("release"):
-            sentry_items["release"] = options["release"]
-
-        if options.get("dsn"):
-            sentry_items["public_key"] = Dsn(options["dsn"]).public_key
-
-        if (
-            transaction.name
-            and transaction.source not in LOW_QUALITY_TRANSACTION_SOURCES
-        ):
-            sentry_items["transaction"] = transaction.name
-
-        if transaction.sample_rate is not None:
-            sentry_items["sample_rate"] = str(transaction.sample_rate)
-
-        if transaction.sampled is not None:
-            sentry_items["sampled"] = "true" if transaction.sampled else "false"
-
-        # there's an existing baggage but it was mutable,
-        # which is why we are creating this new baggage.
-        # However, if by chance the user put some sentry items in there, give them precedence.
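
# Illustration: what the sentry baggage items assembled by the removed
# `populate_from_transaction` above look like once serialized to a W3C baggage
# header. `build_baggage_header` is a hypothetical helper approximating what
# `Baggage.serialize()` produces for the sentry-prefixed items.
from urllib.parse import quote

def build_baggage_header(sentry_items):
    return ",".join(
        "sentry-%s=%s" % (key, quote(str(value)))
        for key, value in sentry_items.items()
    )

# build_baggage_header({
#     "trace_id": "771a43a4192642f0b136d5159a501700",
#     "sample_rate": "0.25",
#     "sampled": "true",
# })
# -> 'sentry-trace_id=771a43a4192642f0b136d5159a501700,sentry-sample_rate=0.25,sentry-sampled=true'
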
-        if transaction._baggage and transaction._baggage.sentry_items:
-            sentry_items.update(transaction._baggage.sentry_items)
-
-        return Baggage(sentry_items, mutable=False)
-
     def freeze(self):
         # type: () -> None
         self.mutable = False
@@ -722,20 +656,6 @@ def strip_sentry_baggage(header):
             )
         )

-    def _sample_rand(self):
-        # type: () -> Optional[Decimal]
-        """Convenience method to get the sample_rand value from the sentry_items.
-
-        We validate the value and parse it as a Decimal before returning it. The value is considered
-        valid if it is a Decimal in the range [0, 1).
-        """
-        sample_rand = try_convert(Decimal, self.sentry_items.get("sample_rand"))
-
-        if sample_rand is not None and Decimal(0) <= sample_rand < Decimal(1):
-            return sample_rand
-
-        return None
-
     def __repr__(self):
         # type: () -> str
         return f'<Baggage "{self.serialize(include_third_party=True)}", mutable={self.mutable}>'
@@ -837,7 +757,7 @@ def func_with_tracing(*args, **kwargs):


 def get_current_span(scope=None):
-    # type: (Optional[sentry_sdk.Scope]) -> Optional[Span]
+    # type: (Optional[sentry_sdk.Scope]) -> Optional[sentry_sdk.tracing.Span]
     """
     Returns the currently active span if there is one running, otherwise `None`
     """
@@ -848,10 +768,9 @@ def get_current_span(scope=None):

 def _generate_sample_rand(
     trace_id,  # type: Optional[str]
-    *,
     interval=(0.0, 1.0),  # type: tuple[float, float]
 ):
-    # type: (...) -> Decimal
+    # type: (...) -> Optional[decimal.Decimal]
     """Generate a sample_rand value from a trace ID.

     The generated value will be pseudorandomly chosen from the provided
@@ -896,12 +815,40 @@ def _sample_rand_range(parent_sampled, sample_rate):
     return sample_rate, 1.0


-# Circular imports
-from sentry_sdk.tracing import (
-    BAGGAGE_HEADER_NAME,
-    LOW_QUALITY_TRANSACTION_SOURCES,
-    SENTRY_TRACE_HEADER_NAME,
-)
+def get_span_status_from_http_code(http_status_code):
+    # type: (int) -> str
+    """
+    Returns the Sentry status corresponding to the given HTTP status code.
-if TYPE_CHECKING: - from sentry_sdk.tracing import Span + See: https://develop.sentry.dev/sdk/event-payloads/contexts/#trace-context + """ + if http_status_code < 400: + return SPANSTATUS.OK + + elif 400 <= http_status_code < 500: + if http_status_code == 403: + return SPANSTATUS.PERMISSION_DENIED + elif http_status_code == 404: + return SPANSTATUS.NOT_FOUND + elif http_status_code == 429: + return SPANSTATUS.RESOURCE_EXHAUSTED + elif http_status_code == 413: + return SPANSTATUS.FAILED_PRECONDITION + elif http_status_code == 401: + return SPANSTATUS.UNAUTHENTICATED + elif http_status_code == 409: + return SPANSTATUS.ALREADY_EXISTS + else: + return SPANSTATUS.INVALID_ARGUMENT + + elif 500 <= http_status_code < 600: + if http_status_code == 504: + return SPANSTATUS.DEADLINE_EXCEEDED + elif http_status_code == 501: + return SPANSTATUS.UNIMPLEMENTED + elif http_status_code == 503: + return SPANSTATUS.UNAVAILABLE + else: + return SPANSTATUS.INTERNAL_ERROR + + return SPANSTATUS.UNKNOWN_ERROR diff --git a/sentry_sdk/transport.py b/sentry_sdk/transport.py index f9a5262903..ec48f49be4 100644 --- a/sentry_sdk/transport.py +++ b/sentry_sdk/transport.py @@ -5,7 +5,6 @@ import socket import ssl import time -import warnings from datetime import datetime, timedelta, timezone from collections import defaultdict from urllib.request import getproxies @@ -18,7 +17,6 @@ import urllib3 import certifi -import sentry_sdk from sentry_sdk.consts import EndpointType from sentry_sdk.utils import Dsn, logger, capture_internal_exceptions from sentry_sdk.worker import BackgroundWorker @@ -41,7 +39,7 @@ from urllib3.poolmanager import PoolManager from urllib3.poolmanager import ProxyManager - from sentry_sdk._types import Event, EventDataCategory + from sentry_sdk._types import EventDataCategory KEEP_ALIVE_SOCKET_OPTIONS = [] for option in [ @@ -74,25 +72,6 @@ def __init__(self, options=None): else: self.parsed_dsn = None - def capture_event(self, event): - # type: (Self, Event) -> None - """ - DEPRECATED: Please use capture_envelope instead. - - This gets invoked with the event dictionary when an event should - be sent to sentry. 
- """ - - warnings.warn( - "capture_event is deprecated, please use capture_envelope instead!", - DeprecationWarning, - stacklevel=2, - ) - - envelope = Envelope() - envelope.add_event(event) - self.capture_envelope(envelope) - @abstractmethod def capture_envelope(self, envelope): # type: (Self, Envelope) -> None @@ -178,17 +157,8 @@ def _parse_rate_limits(header, now=None): retry_after = now + timedelta(seconds=int(retry_after_val)) for category in categories and categories.split(";") or (None,): - if category == "metric_bucket": - try: - namespaces = parameters[4].split(";") - except IndexError: - namespaces = [] - - if not namespaces or "custom" in namespaces: - yield category, retry_after # type: ignore - - else: - yield category, retry_after # type: ignore + category = cast("Optional[EventDataCategory]", category) + yield category, retry_after except (LookupError, ValueError): continue @@ -217,9 +187,6 @@ def __init__(self, options): self._pool = self._make_pool() - # Backwards compatibility for deprecated `self.hub_class` attribute - self._hub_cls = sentry_sdk.Hub - experiments = options.get("_experiments", {}) compression_level = experiments.get( "transport_compression_level", @@ -426,12 +393,6 @@ def _check_disabled(self, category): # type: (str) -> bool def _disabled(bucket): # type: (Any) -> bool - - # The envelope item type used for metrics is statsd - # whereas the rate limit category is metric_bucket - if bucket == "statsd": - bucket = "metric_bucket" - ts = self._disabled_until.get(bucket) return ts is not None and ts > datetime.now(timezone.utc) @@ -458,7 +419,7 @@ def _send_envelope(self, envelope): new_items = [] for item in envelope.items: if self._check_disabled(item.data_category): - if item.data_category in ("transaction", "error", "default", "statsd"): + if item.data_category in ("transaction", "error", "default"): self.on_dropped_event("self_rate_limits") self.record_lost_event("ratelimit_backoff", item=item) else: @@ -587,30 +548,6 @@ def kill(self): logger.debug("Killing HTTP transport") self._worker.kill() - @staticmethod - def _warn_hub_cls(): - # type: () -> None - """Convenience method to warn users about the deprecation of the `hub_cls` attribute.""" - warnings.warn( - "The `hub_cls` attribute is deprecated and will be removed in a future release.", - DeprecationWarning, - stacklevel=3, - ) - - @property - def hub_cls(self): - # type: (Self) -> type[sentry_sdk.Hub] - """DEPRECATED: This attribute is deprecated and will be removed in a future release.""" - HttpTransport._warn_hub_cls() - return self._hub_cls - - @hub_cls.setter - def hub_cls(self, value): - # type: (Self, type[sentry_sdk.Hub]) -> None - """DEPRECATED: This attribute is deprecated and will be removed in a future release.""" - HttpTransport._warn_hub_cls() - self._hub_cls = value - class HttpTransport(BaseHttpTransport): if TYPE_CHECKING: @@ -862,35 +799,6 @@ def _make_pool(self): return httpcore.ConnectionPool(**opts) -class _FunctionTransport(Transport): - """ - DEPRECATED: Users wishing to provide a custom transport should subclass - the Transport class, rather than providing a function. - """ - - def __init__( - self, func # type: Callable[[Event], None] - ): - # type: (...) -> None - Transport.__init__(self) - self._func = func - - def capture_event( - self, event # type: Event - ): - # type: (...) 
-> None - self._func(event) - return None - - def capture_envelope(self, envelope: Envelope) -> None: - # Since function transports expect to be called with an event, we need - # to iterate over the envelope and call the function for each event, via - # the deprecated capture_event method. - event = envelope.get_event() - if event is not None: - self.capture_event(event) - - def make_transport(options): # type: (Dict[str, Any]) -> Optional[Transport] ref_transport = options["transport"] @@ -906,14 +814,6 @@ def make_transport(options): return ref_transport elif isinstance(ref_transport, type) and issubclass(ref_transport, Transport): transport_cls = ref_transport - elif callable(ref_transport): - warnings.warn( - "Function transports are deprecated and will be removed in a future release." - "Please provide a Transport instance or subclass, instead.", - DeprecationWarning, - stacklevel=2, - ) - return _FunctionTransport(ref_transport) # if a transport class is given only instantiate it if the dsn is not # empty or None diff --git a/sentry_sdk/utils.py b/sentry_sdk/utils.py index 595bbe0cf3..407d9613d5 100644 --- a/sentry_sdk/utils.py +++ b/sentry_sdk/utils.py @@ -25,7 +25,6 @@ BaseExceptionGroup = None # type: ignore import sentry_sdk -from sentry_sdk._compat import PY37 from sentry_sdk.consts import ( DEFAULT_ADD_FULL_STACK, DEFAULT_MAX_STACK_FRAMES, @@ -57,7 +56,8 @@ Union, ) - from gevent.hub import Hub + from gevent.hub import Hub as GeventHub + from opentelemetry.util.types import AttributeValue from sentry_sdk._types import Event, ExcInfo @@ -250,31 +250,6 @@ def format_timestamp(value): return utctime.strftime("%Y-%m-%dT%H:%M:%S.%fZ") -ISO_TZ_SEPARATORS = frozenset(("+", "-")) - - -def datetime_from_isoformat(value): - # type: (str) -> datetime - try: - result = datetime.fromisoformat(value) - except (AttributeError, ValueError): - # py 3.6 - timestamp_format = ( - "%Y-%m-%dT%H:%M:%S.%f" if "." in value else "%Y-%m-%dT%H:%M:%S" - ) - if value.endswith("Z"): - value = value[:-1] + "+0000" - - if value[-6] in ISO_TZ_SEPARATORS: - timestamp_format += "%z" - value = value[:-3] + value[-2:] - elif value[-5] in ISO_TZ_SEPARATORS: - timestamp_format += "%z" - - result = datetime.strptime(value, timestamp_format) - return result.astimezone(timezone.utc) - - def event_hint_with_exc_info(exc_info=None): # type: (Optional[ExcInfo]) -> Dict[str, Optional[ExcInfo]] """Creates a hint with the exc info filled in.""" @@ -822,14 +797,17 @@ def exceptions_from_error( ): # type: (...) -> Tuple[int, List[Dict[str, Any]]] """ - Creates the list of exceptions. - This can include chained exceptions and exceptions from an ExceptionGroup. - - See the Exception Interface documentation for more details: - https://develop.sentry.dev/sdk/event-payloads/exception/ + Converts the given exception information into the Sentry structured "exception" format. + This will return a list of exceptions (a flattened tree of exceptions) in the + format of the Exception Interface documentation: + https://develop.sentry.dev/sdk/data-model/event-payloads/exception/ + + This function can handle: + - simple exceptions + - chained exceptions (raise .. from ..) 
+ - exception groups """ - - parent = single_exception_from_error_tuple( + base_exception = single_exception_from_error_tuple( exc_type=exc_type, exc_value=exc_value, tb=tb, @@ -840,64 +818,63 @@ def exceptions_from_error( source=source, full_stack=full_stack, ) - exceptions = [parent] + exceptions = [base_exception] parent_id = exception_id exception_id += 1 - should_supress_context = hasattr(exc_value, "__suppress_context__") and exc_value.__suppress_context__ # type: ignore - if should_supress_context: - # Add direct cause. - # The field `__cause__` is set when raised with the exception (using the `from` keyword). - exception_has_cause = ( + causing_exception = None + exception_source = None + + # Add any causing exceptions, if present. + should_suppress_context = hasattr(exc_value, "__suppress_context__") and exc_value.__suppress_context__ # type: ignore + # Note: __suppress_context__ is True if the exception is raised with the `from` keyword. + if should_suppress_context: + # Explicitly chained exceptions (Like: raise NewException() from OriginalException()) + # The field `__cause__` is set to OriginalException + has_explicit_causing_exception = ( exc_value and hasattr(exc_value, "__cause__") and exc_value.__cause__ is not None ) - if exception_has_cause: - cause = exc_value.__cause__ # type: ignore - (exception_id, child_exceptions) = exceptions_from_error( - exc_type=type(cause), - exc_value=cause, - tb=getattr(cause, "__traceback__", None), - client_options=client_options, - mechanism=mechanism, - exception_id=exception_id, - source="__cause__", - full_stack=full_stack, - ) - exceptions.extend(child_exceptions) - + if has_explicit_causing_exception: + exception_source = "__cause__" + causing_exception = exc_value.__cause__ # type: ignore else: - # Add indirect cause. - # The field `__context__` is assigned if another exception occurs while handling the exception. - exception_has_content = ( + # Implicitly chained exceptions (when an exception occurs while handling another exception) + # The field `__context__` is set in the exception that occurs while handling another exception, + # to the other exception. + has_implicit_causing_exception = ( exc_value and hasattr(exc_value, "__context__") and exc_value.__context__ is not None ) - if exception_has_content: - context = exc_value.__context__ # type: ignore - (exception_id, child_exceptions) = exceptions_from_error( - exc_type=type(context), - exc_value=context, - tb=getattr(context, "__traceback__", None), - client_options=client_options, - mechanism=mechanism, - exception_id=exception_id, - source="__context__", - full_stack=full_stack, - ) - exceptions.extend(child_exceptions) + if has_implicit_causing_exception: + exception_source = "__context__" + causing_exception = exc_value.__context__ # type: ignore + + if causing_exception: + (exception_id, child_exceptions) = exceptions_from_error( + exc_type=type(causing_exception), + exc_value=causing_exception, + tb=getattr(causing_exception, "__traceback__", None), + client_options=client_options, + mechanism=mechanism, + exception_id=exception_id, + parent_id=parent_id, + source=exception_source, + full_stack=full_stack, + ) + exceptions.extend(child_exceptions) - # Add exceptions from an ExceptionGroup. + # Add child exceptions from an ExceptionGroup. 
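The branch above relies on standard Python chaining semantics: `raise ... from ...` sets both `__cause__` and `__suppress_context__`, while an exception raised during the handling of another records the in-flight exception in `__context__`. A self-contained illustration in plain Python (not SDK code):

```python
# Plain-Python demonstration of the attributes inspected above.
try:
    try:
        raise ValueError("original")
    except ValueError as exc:
        raise RuntimeError("wrapper") from exc  # explicit chaining
except RuntimeError as err:
    assert isinstance(err.__cause__, ValueError)    # set by `from`
    assert err.__suppress_context__ is True         # also set by `from`
    assert isinstance(err.__context__, ValueError)  # implicit link still recorded
```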
is_exception_group = exc_value and hasattr(exc_value, "exceptions") if is_exception_group: - for idx, e in enumerate(exc_value.exceptions): # type: ignore + for idx, causing_exception in enumerate(exc_value.exceptions): # type: ignore (exception_id, child_exceptions) = exceptions_from_error( - exc_type=type(e), - exc_value=e, - tb=getattr(e, "__traceback__", None), + exc_type=type(causing_exception), + exc_value=causing_exception, + tb=getattr(causing_exception, "__traceback__", None), client_options=client_options, mechanism=mechanism, exception_id=exception_id, @@ -917,38 +894,29 @@ def exceptions_from_error_tuple( full_stack=None, # type: Optional[list[dict[str, Any]]] ): # type: (...) -> List[Dict[str, Any]] + """ + Convert Python's exception information into Sentry's structured "exception" format in the event. + See https://develop.sentry.dev/sdk/data-model/event-payloads/exception/ + This is the entry point for the exception handling. + """ + # unpack the exception info tuple exc_type, exc_value, tb = exc_info - is_exception_group = BaseExceptionGroup is not None and isinstance( - exc_value, BaseExceptionGroup + # let exceptions_from_error do the actual work + _, exceptions = exceptions_from_error( + exc_type=exc_type, + exc_value=exc_value, + tb=tb, + client_options=client_options, + mechanism=mechanism, + exception_id=0, + parent_id=0, + full_stack=full_stack, ) - if is_exception_group: - (_, exceptions) = exceptions_from_error( - exc_type=exc_type, - exc_value=exc_value, - tb=tb, - client_options=client_options, - mechanism=mechanism, - exception_id=0, - parent_id=0, - full_stack=full_stack, - ) - - else: - exceptions = [] - for exc_type, exc_value, tb in walk_exception_chain(exc_info): - exceptions.append( - single_exception_from_error_tuple( - exc_type=exc_type, - exc_value=exc_value, - tb=tb, - client_options=client_options, - mechanism=mechanism, - full_stack=full_stack, - ) - ) - + # make sure the exceptions are sorted + # from the innermost (oldest) + # to the outermost (newest) exception exceptions.reverse() return exceptions @@ -1372,27 +1340,13 @@ def _get_contextvars(): See https://docs.sentry.io/platforms/python/contextvars/ for more information. """ if not _is_contextvars_broken(): - # aiocontextvars is a PyPI package that ensures that the contextvars - # backport (also a PyPI package) works with asyncio under Python 3.6 - # - # Import it if available. - if sys.version_info < (3, 7): - # `aiocontextvars` is absolutely required for functional - # contextvars on Python 3.6. - try: - from aiocontextvars import ContextVar - - return True, ContextVar - except ImportError: - pass - else: - # On Python 3.7 contextvars are functional. - try: - from contextvars import ContextVar + # On Python 3.7+ contextvars are functional. + try: + from contextvars import ContextVar - return True, ContextVar - except ImportError: - pass + return True, ContextVar + except ImportError: + pass # Fall back to basic thread-local usage. 
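With the Python 3.6 fallback removed, the standard-library `contextvars` module is always available. The property the SDK relies on here, per-task isolation of `ContextVar` values under asyncio on Python 3.7+, can be checked with a short illustrative snippet (not SDK code):

```python
import asyncio
from contextvars import ContextVar

request_id = ContextVar("request_id", default=None)

async def handler(rid):
    request_id.set(rid)
    await asyncio.sleep(0)  # yield so the tasks interleave
    # Each task runs in a copy of the context, so values never leak across tasks.
    assert request_id.get() == rid

async def main():
    await asyncio.gather(*(handler(i) for i in range(3)))

asyncio.run(main())
```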
@@ -1792,7 +1746,7 @@ def ensure_integration_enabled( ```python @ensure_integration_enabled(MyIntegration, my_function) def patch_my_function(): - with sentry_sdk.start_transaction(...): + with sentry_sdk.start_span(...): return my_function() ``` """ @@ -1818,19 +1772,6 @@ def runner(*args: "P.args", **kwargs: "P.kwargs"): return patcher -if PY37: - - def nanosecond_time(): - # type: () -> int - return time.perf_counter_ns() - -else: - - def nanosecond_time(): - # type: () -> int - return int(time.perf_counter() * 1e9) - - def now(): # type: () -> float return time.perf_counter() @@ -1842,9 +1783,9 @@ def now(): except ImportError: # it's not great that the signatures are different, get_hub can't return None - # consider adding an if TYPE_CHECKING to change the signature to Optional[Hub] + # consider adding an if TYPE_CHECKING to change the signature to Optional[GeventHub] def get_gevent_hub(): # type: ignore[misc] - # type: () -> Optional[Hub] + # type: () -> Optional[GeventHub] return None def is_module_patched(mod_name): @@ -1909,6 +1850,56 @@ def get_current_thread_meta(thread=None): return None, None +def _serialize_span_attribute(value): + # type: (Any) -> Optional[AttributeValue] + """Serialize an object so that it's OTel-compatible and displays nicely in Sentry.""" + # check for allowed primitives + if isinstance(value, (int, str, float, bool)): + return value + + # lists are allowed too, as long as they don't mix types + if isinstance(value, (list, tuple)): + for type_ in (int, str, float, bool): + if all(isinstance(item, type_) for item in value): + return list(value) + + # if this is anything else, just try to coerce to string + # we prefer json.dumps since this makes things like dictionaries display + # nicely in the UI + try: + return json.dumps(value) + except TypeError: + try: + return str(value) + except Exception: + return None + + +ISO_TZ_SEPARATORS = frozenset(("+", "-")) + + +def datetime_from_isoformat(value): + # type: (str) -> datetime + try: + result = datetime.fromisoformat(value) + except (AttributeError, ValueError): + # py 3.6 + timestamp_format = ( + "%Y-%m-%dT%H:%M:%S.%f" if "." in value else "%Y-%m-%dT%H:%M:%S" + ) + if value.endswith("Z"): + value = value[:-1] + "+0000" + + if value[-6] in ISO_TZ_SEPARATORS: + timestamp_format += "%z" + value = value[:-3] + value[-2:] + elif value[-5] in ISO_TZ_SEPARATORS: + timestamp_format += "%z" + + result = datetime.strptime(value, timestamp_format) + return result.astimezone(timezone.utc) + + def should_be_treated_as_error(ty, value): # type: (Any, Any) -> bool if ty == SystemExit and hasattr(value, "code") and value.code in (0, None): @@ -1918,18 +1909,12 @@ def should_be_treated_as_error(ty, value): return True -if TYPE_CHECKING: - T = TypeVar("T") - +def http_client_status_to_breadcrumb_level(status_code): + # type: (Optional[int]) -> str + if status_code is not None: + if 500 <= status_code <= 599: + return "error" + elif 400 <= status_code <= 499: + return "warning" -def try_convert(convert_func, value): - # type: (Callable[[Any], T], Any) -> Optional[T] - """ - Attempt to convert from an unknown type to a specific type, using the - given function. Return None if the conversion fails, i.e. if the function - raises an exception. 
- """ - try: - return convert_func(value) - except Exception: - return None + return "info" diff --git a/setup.py b/setup.py index 877585472b..a1b594c9c8 100644 --- a/setup.py +++ b/setup.py @@ -21,7 +21,7 @@ def get_file_text(file_name): setup( name="sentry-sdk", - version="2.27.0", + version="3.0.0", author="Sentry Team and Contributors", author_email="hello@sentry.io", url="https://github.com/getsentry/sentry-python", @@ -37,10 +37,11 @@ def get_file_text(file_name): package_data={"sentry_sdk": ["py.typed"]}, zip_safe=False, license="MIT", - python_requires=">=3.6", + python_requires=">=3.7", install_requires=[ "urllib3>=1.26.11", "certifi", + "opentelemetry-sdk>=1.4.0", ], extras_require={ "aiohttp": ["aiohttp>=3.5"], @@ -69,7 +70,6 @@ def get_file_text(file_name): "openai": ["openai>=1.0.0", "tiktoken>=0.3.0"], "openfeature": ["openfeature-sdk>=0.7.1"], "opentelemetry": ["opentelemetry-distro>=0.35b0"], - "opentelemetry-experimental": ["opentelemetry-distro"], "pure-eval": ["pure_eval", "executing", "asttokens"], "pymongo": ["pymongo>=3.1"], "pyspark": ["pyspark>=2.4.4"], @@ -85,8 +85,8 @@ def get_file_text(file_name): }, entry_points={ "opentelemetry_propagator": [ - "sentry=sentry_sdk.integrations.opentelemetry:SentryPropagator" - ] + "sentry=sentry_sdk.opentelemetry:SentryPropagator" + ], }, classifiers=[ "Development Status :: 5 - Production/Stable", @@ -96,7 +96,6 @@ def get_file_text(file_name): "Operating System :: OS Independent", "Programming Language :: Python", "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.6", "Programming Language :: Python :: 3.7", "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", diff --git a/tests/conftest.py b/tests/conftest.py index b5f3f8b00e..5987265e32 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -25,11 +25,10 @@ import sentry_sdk.utils from sentry_sdk.envelope import Envelope from sentry_sdk.integrations import ( # noqa: F401 - _DEFAULT_INTEGRATIONS, _installed_integrations, _processed_integrations, ) -from sentry_sdk.profiler import teardown_profiler +from sentry_sdk.profiler.transaction_profiler import teardown_profiler from sentry_sdk.profiler.continuous_profiler import teardown_continuous_profiler from sentry_sdk.transport import Transport from sentry_sdk.utils import reraise @@ -64,6 +63,10 @@ def benchmark(): from sentry_sdk import scope +from sentry_sdk.opentelemetry.scope import ( + setup_scope_context_management, + setup_initial_scopes, +) @pytest.fixture(autouse=True) @@ -75,6 +78,8 @@ def clean_scopes(): scope._isolation_scope.set(None) scope._current_scope.set(None) + setup_initial_scopes() + @pytest.fixture(autouse=True) def internal_exceptions(request): @@ -174,13 +179,8 @@ def reset_integrations(): with a clean slate to ensure monkeypatching works well, but this also means some other stuff will be monkeypatched twice. 
""" - global _DEFAULT_INTEGRATIONS, _processed_integrations - try: - _DEFAULT_INTEGRATIONS.remove( - "sentry_sdk.integrations.opentelemetry.integration.OpenTelemetryIntegration" - ) - except ValueError: - pass + global _installed_integrations, _processed_integrations + _processed_integrations.clear() _installed_integrations.clear() @@ -199,6 +199,7 @@ def inner(identifier): @pytest.fixture def sentry_init(request): def inner(*a, **kw): + setup_scope_context_management() kw.setdefault("transport", TestTransport()) client = sentry_sdk.Client(*a, **kw) sentry_sdk.get_global_scope().set_client(client) @@ -663,3 +664,14 @@ def __eq__(self, other): def __ne__(self, other): return not self.__eq__(other) + + +class SortedBaggage: + def __init__(self, baggage): + self.baggage = baggage + + def __eq__(self, other): + return sorted(self.baggage.split(",")) == sorted(other.split(",")) + + def __ne__(self, other): + return not self.__eq__(other) diff --git a/tests/integrations/aiohttp/test_aiohttp.py b/tests/integrations/aiohttp/test_aiohttp.py index 06859b127f..bc019d54a4 100644 --- a/tests/integrations/aiohttp/test_aiohttp.py +++ b/tests/integrations/aiohttp/test_aiohttp.py @@ -1,6 +1,6 @@ import asyncio import json - +import re from contextlib import suppress from unittest import mock @@ -13,7 +13,6 @@ from aiohttp import web, ClientSession from aiohttp.client import ServerDisconnectedError -from aiohttp.web_request import Request from aiohttp.web_exceptions import ( HTTPInternalServerError, HTTPNetworkAuthenticationRequired, @@ -22,7 +21,7 @@ HTTPUnavailableForLegalReasons, ) -from sentry_sdk import capture_message, start_transaction +from sentry_sdk import capture_message, start_span from sentry_sdk.integrations.aiohttp import AioHttpIntegration from tests.conftest import ApproxDict @@ -306,13 +305,12 @@ async def hello(request): @pytest.mark.asyncio -async def test_traces_sampler_gets_request_object_in_sampling_context( +async def test_traces_sampler_gets_attributes_in_sampling_context( sentry_init, aiohttp_client, - DictionaryContaining, # noqa: N803 - ObjectDescribedBy, # noqa: N803 ): - traces_sampler = mock.Mock() + traces_sampler = mock.Mock(return_value=True) + sentry_init( integrations=[AioHttpIntegration()], traces_sampler=traces_sampler, @@ -325,17 +323,24 @@ async def kangaroo_handler(request): app.router.add_get("/tricks/kangaroo", kangaroo_handler) client = await aiohttp_client(app) - await client.get("/tricks/kangaroo") + await client.get( + "/tricks/kangaroo?jump=high", headers={"Custom-Header": "Custom Value"} + ) - traces_sampler.assert_any_call( - DictionaryContaining( - { - "aiohttp_request": ObjectDescribedBy( - type=Request, attrs={"method": "GET", "path": "/tricks/kangaroo"} - ) - } - ) + assert traces_sampler.call_count == 1 + sampling_context = traces_sampler.call_args_list[0][0][0] + assert isinstance(sampling_context, dict) + assert re.match( + r"http:\/\/127\.0\.0\.1:[0-9]{4,5}\/tricks\/kangaroo\?jump=high", + sampling_context["url.full"], ) + assert sampling_context["url.path"] == "/tricks/kangaroo" + assert sampling_context["url.query"] == "jump=high" + assert sampling_context["url.scheme"] == "http" + assert sampling_context["http.request.method"] == "GET" + assert sampling_context["server.address"] == "127.0.0.1" + assert sampling_context["server.port"].isnumeric() + assert sampling_context["http.request.header.custom-header"] == "Custom Value" @pytest.mark.asyncio @@ -426,7 +431,7 @@ async def hello(request): # The aiohttp_client is instrumented so will generate the 
sentry-trace header and add request. # Get the sentry-trace header from the request so we can later compare with transaction events. client = await aiohttp_client(app) - with start_transaction(): + with start_span(name="request"): # Headers are only added to the span if there is an active transaction resp = await client.get("/") @@ -505,7 +510,7 @@ async def handler(request): raw_server = await aiohttp_raw_server(handler) - with start_transaction(): + with start_span(name="breadcrumb"): events = capture_events() client = await aiohttp_client(raw_server) @@ -534,8 +539,8 @@ async def handler(request): @pytest.mark.parametrize( "status_code,level", [ - (200, None), - (301, None), + (200, "info"), + (301, "info"), (403, "warning"), (405, "warning"), (500, "error"), @@ -558,7 +563,7 @@ async def handler(request): raw_server = await aiohttp_raw_server(handler) - with start_transaction(): + with start_span(name="crumbs"): events = capture_events() client = await aiohttp_client(raw_server) @@ -570,10 +575,7 @@ async def handler(request): crumb = event["breadcrumbs"]["values"][0] assert crumb["type"] == "http" - if level is None: - assert "level" not in crumb - else: - assert crumb["level"] == level + assert crumb["level"] == level assert crumb["category"] == "httplib" assert crumb["data"] == ApproxDict( { @@ -587,34 +589,39 @@ async def handler(request): @pytest.mark.asyncio -async def test_outgoing_trace_headers(sentry_init, aiohttp_raw_server, aiohttp_client): +async def test_outgoing_trace_headers( + sentry_init, aiohttp_raw_server, aiohttp_client, capture_envelopes +): sentry_init( integrations=[AioHttpIntegration()], traces_sample_rate=1.0, ) + envelopes = capture_envelopes() + async def handler(request): return web.Response(text="OK") raw_server = await aiohttp_raw_server(handler) - with start_transaction( + with start_span( name="/interactions/other-dogs/new-dog", op="greeting.sniff", - # make trace_id difference between transactions - trace_id="0123456789012345678901234567890", ) as transaction: client = await aiohttp_client(raw_server) resp = await client.get("/") - request_span = transaction._span_recorder.spans[-1] - - assert resp.request_info.headers[ - "sentry-trace" - ] == "{trace_id}-{parent_span_id}-{sampled}".format( - trace_id=transaction.trace_id, - parent_span_id=request_span.span_id, - sampled=1, - ) + + (envelope,) = envelopes + transaction = envelope.get_transaction_event() + request_span = transaction["spans"][-1] + + assert resp.request_info.headers[ + "sentry-trace" + ] == "{trace_id}-{parent_span_id}-{sampled}".format( + trace_id=transaction["contexts"]["trace"]["trace_id"], + parent_span_id=request_span["span_id"], + sampled=1, + ) @pytest.mark.asyncio @@ -633,17 +640,24 @@ async def handler(request): raw_server = await aiohttp_raw_server(handler) with mock.patch("sentry_sdk.tracing_utils.Random.uniform", return_value=0.5): - with start_transaction( + with start_span( name="/interactions/other-dogs/new-dog", op="greeting.sniff", - trace_id="0123456789012345678901234567890", - ): + ) as transaction: client = await aiohttp_client(raw_server) resp = await client.get("/", headers={"bagGage": "custom=value"}) - assert ( - resp.request_info.headers["baggage"] - == "custom=value,sentry-trace_id=0123456789012345678901234567890,sentry-sample_rand=0.500000,sentry-environment=production,sentry-release=d08ebdb9309e1b004c6f52202de58a09c2268e42,sentry-transaction=/interactions/other-dogs/new-dog,sentry-sample_rate=1.0,sentry-sampled=true" + assert 
sorted(resp.request_info.headers["baggage"].split(",")) == sorted( + [ + "custom=value", + f"sentry-trace_id={transaction.trace_id}", + "sentry-environment=production", + "sentry-release=d08ebdb9309e1b004c6f52202de58a09c2268e42", + "sentry-transaction=/interactions/other-dogs/new-dog", + "sentry-sample_rate=1.0", + "sentry-sampled=true", + "sentry-sample_rand=0.500000", + ] ) diff --git a/tests/integrations/anthropic/test_anthropic.py b/tests/integrations/anthropic/test_anthropic.py index 7f6622a1ba..5da9b870eb 100644 --- a/tests/integrations/anthropic/test_anthropic.py +++ b/tests/integrations/anthropic/test_anthropic.py @@ -21,7 +21,7 @@ async def __call__(self, *args, **kwargs): from anthropic.types.message_start_event import MessageStartEvent from sentry_sdk.integrations.anthropic import _add_ai_data_to_span, _collect_ai_data -from sentry_sdk.utils import package_version +from sentry_sdk.utils import _serialize_span_attribute, package_version try: from anthropic.types import InputJSONDelta @@ -44,7 +44,7 @@ async def __call__(self, *args, **kwargs): except ImportError: from anthropic.types.content_block import ContentBlock as TextBlock -from sentry_sdk import start_transaction, start_span +from sentry_sdk import start_span from sentry_sdk.consts import OP, SPANDATA from sentry_sdk.integrations.anthropic import AnthropicIntegration @@ -92,7 +92,7 @@ def test_nonstreaming_create_message( } ] - with start_transaction(name="anthropic"): + with start_span(name="anthropic"): response = client.messages.create( max_tokens=1024, messages=messages, model="model" ) @@ -117,17 +117,19 @@ def test_nonstreaming_create_message( assert span["data"][SPANDATA.AI_MODEL_ID] == "model" if send_default_pii and include_prompts: - assert span["data"][SPANDATA.AI_INPUT_MESSAGES] == messages - assert span["data"][SPANDATA.AI_RESPONSES] == [ - {"type": "text", "text": "Hi, I'm Claude."} - ] + assert span["data"][SPANDATA.AI_INPUT_MESSAGES] == _serialize_span_attribute( + messages + ) + assert span["data"][SPANDATA.AI_RESPONSES] == _serialize_span_attribute( + [{"type": "text", "text": "Hi, I'm Claude."}] + ) else: assert SPANDATA.AI_INPUT_MESSAGES not in span["data"] assert SPANDATA.AI_RESPONSES not in span["data"] - assert span["measurements"]["ai_prompt_tokens_used"]["value"] == 10 - assert span["measurements"]["ai_completion_tokens_used"]["value"] == 20 - assert span["measurements"]["ai_total_tokens_used"]["value"] == 30 + assert span["data"]["ai.prompt_tokens.used"] == 10 + assert span["data"]["ai.completion_tokens.used"] == 20 + assert span["data"]["ai.total_tokens.used"] == 30 assert span["data"]["ai.streaming"] is False @@ -160,7 +162,7 @@ async def test_nonstreaming_create_message_async( } ] - with start_transaction(name="anthropic"): + with start_span(name="anthropic"): response = await client.messages.create( max_tokens=1024, messages=messages, model="model" ) @@ -185,17 +187,19 @@ async def test_nonstreaming_create_message_async( assert span["data"][SPANDATA.AI_MODEL_ID] == "model" if send_default_pii and include_prompts: - assert span["data"][SPANDATA.AI_INPUT_MESSAGES] == messages - assert span["data"][SPANDATA.AI_RESPONSES] == [ - {"type": "text", "text": "Hi, I'm Claude."} - ] + assert span["data"][SPANDATA.AI_INPUT_MESSAGES] == _serialize_span_attribute( + messages + ) + assert span["data"][SPANDATA.AI_RESPONSES] == _serialize_span_attribute( + [{"type": "text", "text": "Hi, I'm Claude."}] + ) else: assert SPANDATA.AI_INPUT_MESSAGES not in span["data"] assert SPANDATA.AI_RESPONSES not in 
span["data"] - assert span["measurements"]["ai_prompt_tokens_used"]["value"] == 10 - assert span["measurements"]["ai_completion_tokens_used"]["value"] == 20 - assert span["measurements"]["ai_total_tokens_used"]["value"] == 30 + assert span["data"]["ai.prompt_tokens.used"] == 10 + assert span["data"]["ai.completion_tokens.used"] == 20 + assert span["data"]["ai.total_tokens.used"] == 30 assert span["data"]["ai.streaming"] is False @@ -261,7 +265,7 @@ def test_streaming_create_message( } ] - with start_transaction(name="anthropic"): + with start_span(name="anthropic"): message = client.messages.create( max_tokens=1024, messages=messages, model="model", stream=True ) @@ -284,18 +288,20 @@ def test_streaming_create_message( assert span["data"][SPANDATA.AI_MODEL_ID] == "model" if send_default_pii and include_prompts: - assert span["data"][SPANDATA.AI_INPUT_MESSAGES] == messages - assert span["data"][SPANDATA.AI_RESPONSES] == [ - {"type": "text", "text": "Hi! I'm Claude!"} - ] + assert span["data"][SPANDATA.AI_INPUT_MESSAGES] == _serialize_span_attribute( + messages + ) + assert span["data"][SPANDATA.AI_RESPONSES] == _serialize_span_attribute( + [{"type": "text", "text": "Hi! I'm Claude!"}] + ) else: assert SPANDATA.AI_INPUT_MESSAGES not in span["data"] assert SPANDATA.AI_RESPONSES not in span["data"] - assert span["measurements"]["ai_prompt_tokens_used"]["value"] == 10 - assert span["measurements"]["ai_completion_tokens_used"]["value"] == 30 - assert span["measurements"]["ai_total_tokens_used"]["value"] == 40 + assert span["data"]["ai.prompt_tokens.used"] == 10 + assert span["data"]["ai.completion_tokens.used"] == 30 + assert span["data"]["ai.total_tokens.used"] == 40 assert span["data"]["ai.streaming"] is True @@ -364,7 +370,7 @@ async def test_streaming_create_message_async( } ] - with start_transaction(name="anthropic"): + with start_span(name="anthropic"): message = await client.messages.create( max_tokens=1024, messages=messages, model="model", stream=True ) @@ -387,18 +393,20 @@ async def test_streaming_create_message_async( assert span["data"][SPANDATA.AI_MODEL_ID] == "model" if send_default_pii and include_prompts: - assert span["data"][SPANDATA.AI_INPUT_MESSAGES] == messages - assert span["data"][SPANDATA.AI_RESPONSES] == [ - {"type": "text", "text": "Hi! I'm Claude!"} - ] + assert span["data"][SPANDATA.AI_INPUT_MESSAGES] == _serialize_span_attribute( + messages + ) + assert span["data"][SPANDATA.AI_RESPONSES] == _serialize_span_attribute( + [{"type": "text", "text": "Hi! 
I'm Claude!"}] + ) else: assert SPANDATA.AI_INPUT_MESSAGES not in span["data"] assert SPANDATA.AI_RESPONSES not in span["data"] - assert span["measurements"]["ai_prompt_tokens_used"]["value"] == 10 - assert span["measurements"]["ai_completion_tokens_used"]["value"] == 30 - assert span["measurements"]["ai_total_tokens_used"]["value"] == 40 + assert span["data"]["ai.prompt_tokens.used"] == 10 + assert span["data"]["ai.completion_tokens.used"] == 30 + assert span["data"]["ai.total_tokens.used"] == 40 assert span["data"]["ai.streaming"] is True @@ -494,7 +502,7 @@ def test_streaming_create_message_with_input_json_delta( } ] - with start_transaction(name="anthropic"): + with start_span(name="anthropic"): message = client.messages.create( max_tokens=1024, messages=messages, model="model", stream=True ) @@ -517,17 +525,20 @@ def test_streaming_create_message_with_input_json_delta( assert span["data"][SPANDATA.AI_MODEL_ID] == "model" if send_default_pii and include_prompts: - assert span["data"][SPANDATA.AI_INPUT_MESSAGES] == messages - assert span["data"][SPANDATA.AI_RESPONSES] == [ - {"text": "{'location': 'San Francisco, CA'}", "type": "text"} - ] + assert span["data"][SPANDATA.AI_INPUT_MESSAGES] == _serialize_span_attribute( + messages + ) + assert span["data"][SPANDATA.AI_RESPONSES] == _serialize_span_attribute( + [{"type": "text", "text": "{'location': 'San Francisco, CA'}"}] + ) # we do not record InputJSONDelta because it could contain PII + else: assert SPANDATA.AI_INPUT_MESSAGES not in span["data"] assert SPANDATA.AI_RESPONSES not in span["data"] - assert span["measurements"]["ai_prompt_tokens_used"]["value"] == 366 - assert span["measurements"]["ai_completion_tokens_used"]["value"] == 51 - assert span["measurements"]["ai_total_tokens_used"]["value"] == 417 + assert span["data"]["ai.prompt_tokens.used"] == 366 + assert span["data"]["ai.completion_tokens.used"] == 51 + assert span["data"]["ai.total_tokens.used"] == 417 assert span["data"]["ai.streaming"] is True @@ -630,7 +641,7 @@ async def test_streaming_create_message_with_input_json_delta_async( } ] - with start_transaction(name="anthropic"): + with start_span(name="anthropic"): message = await client.messages.create( max_tokens=1024, messages=messages, model="model", stream=True ) @@ -653,21 +664,24 @@ async def test_streaming_create_message_with_input_json_delta_async( assert span["data"][SPANDATA.AI_MODEL_ID] == "model" if send_default_pii and include_prompts: - assert span["data"][SPANDATA.AI_INPUT_MESSAGES] == messages - assert span["data"][SPANDATA.AI_RESPONSES] == [ - {"text": "{'location': 'San Francisco, CA'}", "type": "text"} - ] + assert span["data"][SPANDATA.AI_INPUT_MESSAGES] == _serialize_span_attribute( + messages + ) + assert span["data"][SPANDATA.AI_RESPONSES] == _serialize_span_attribute( + [{"type": "text", "text": "{'location': 'San Francisco, CA'}"}] + ) # we do not record InputJSONDelta because it could contain PII else: assert SPANDATA.AI_INPUT_MESSAGES not in span["data"] assert SPANDATA.AI_RESPONSES not in span["data"] - assert span["measurements"]["ai_prompt_tokens_used"]["value"] == 366 - assert span["measurements"]["ai_completion_tokens_used"]["value"] == 51 - assert span["measurements"]["ai_total_tokens_used"]["value"] == 417 + assert span["data"]["ai.prompt_tokens.used"] == 366 + assert span["data"]["ai.completion_tokens.used"] == 51 + assert span["data"]["ai.total_tokens.used"] == 417 assert span["data"]["ai.streaming"] is True +@pytest.mark.forked def test_exception_message_create(sentry_init, 
capture_events): sentry_init(integrations=[AnthropicIntegration()], traces_sample_rate=1.0) events = capture_events() @@ -724,7 +738,7 @@ def test_span_origin(sentry_init, capture_events): } ] - with start_transaction(name="anthropic"): + with start_span(name="anthropic"): client.messages.create(max_tokens=1024, messages=messages, model="model") (event,) = events @@ -751,7 +765,7 @@ async def test_span_origin_async(sentry_init, capture_events): } ] - with start_transaction(name="anthropic"): + with start_span(name="anthropic"): await client.messages.create(max_tokens=1024, messages=messages, model="model") (event,) = events @@ -788,29 +802,35 @@ def test_collect_ai_data_with_input_json_delta(): ANTHROPIC_VERSION < (0, 27), reason="Versions <0.27.0 do not include InputJSONDelta.", ) -def test_add_ai_data_to_span_with_input_json_delta(sentry_init): +def test_add_ai_data_to_span_with_input_json_delta(sentry_init, capture_events): sentry_init( integrations=[AnthropicIntegration(include_prompts=True)], traces_sample_rate=1.0, send_default_pii=True, ) + events = capture_events() - with start_transaction(name="test"): - span = start_span() - integration = AnthropicIntegration() + with start_span(name="test"): + with start_span(name="anthropic") as span: + integration = AnthropicIntegration() - _add_ai_data_to_span( - span, - integration, - input_tokens=10, - output_tokens=20, - content_blocks=["{'test': 'data',", "'more': 'json'}"], - ) + _add_ai_data_to_span( + span, + integration, + input_tokens=10, + output_tokens=20, + content_blocks=["{'test': 'data',", "'more': 'json'}"], + ) - assert span._data.get(SPANDATA.AI_RESPONSES) == [ - {"type": "text", "text": "{'test': 'data','more': 'json'}"} - ] - assert span._data.get("ai.streaming") is True - assert span._measurements.get("ai_prompt_tokens_used")["value"] == 10 - assert span._measurements.get("ai_completion_tokens_used")["value"] == 20 - assert span._measurements.get("ai_total_tokens_used")["value"] == 30 + (event,) = events + + assert len(event["spans"]) == 1 + (span,) = event["spans"] + + assert span["data"][SPANDATA.AI_RESPONSES] == _serialize_span_attribute( + [{"type": "text", "text": "{'test': 'data','more': 'json'}"}] + ) + assert span["data"]["ai.streaming"] is True + assert span["data"]["ai.prompt_tokens.used"] == 10 + assert span["data"]["ai.completion_tokens.used"] == 20 + assert span["data"]["ai.total_tokens.used"] == 30 diff --git a/tests/integrations/ariadne/test_ariadne.py b/tests/integrations/ariadne/test_ariadne.py index 2c3b086aa5..6637a88451 100644 --- a/tests/integrations/ariadne/test_ariadne.py +++ b/tests/integrations/ariadne/test_ariadne.py @@ -68,7 +68,9 @@ def test_capture_request_and_response_if_send_pii_is_on_async( assert len(events) == 1 (event,) = events - assert event["exception"]["values"][0]["mechanism"]["type"] == "ariadne" + assert len(event["exception"]["values"]) == 2 + assert event["exception"]["values"][0]["mechanism"]["type"] == "chained" + assert event["exception"]["values"][-1]["mechanism"]["type"] == "ariadne" assert event["contexts"]["response"] == { "data": { "data": {"error": None}, @@ -111,7 +113,10 @@ def graphql_server(): assert len(events) == 1 (event,) = events - assert event["exception"]["values"][0]["mechanism"]["type"] == "ariadne" + assert len(event["exception"]["values"]) == 2 + assert event["exception"]["values"][0]["mechanism"]["type"] == "chained" + assert event["exception"]["values"][-1]["mechanism"]["type"] == "ariadne" + assert event["contexts"]["response"] == { "data": { "data": 
{"error": None}, @@ -152,7 +157,10 @@ def test_do_not_capture_request_and_response_if_send_pii_is_off_async( assert len(events) == 1 (event,) = events - assert event["exception"]["values"][0]["mechanism"]["type"] == "ariadne" + assert len(event["exception"]["values"]) == 2 + assert event["exception"]["values"][0]["mechanism"]["type"] == "chained" + assert event["exception"]["values"][-1]["mechanism"]["type"] == "ariadne" + assert "data" not in event["request"] assert "response" not in event["contexts"] @@ -182,7 +190,9 @@ def graphql_server(): assert len(events) == 1 (event,) = events - assert event["exception"]["values"][0]["mechanism"]["type"] == "ariadne" + assert len(event["exception"]["values"]) == 2 + assert event["exception"]["values"][0]["mechanism"]["type"] == "chained" + assert event["exception"]["values"][-1]["mechanism"]["type"] == "ariadne" assert "data" not in event["request"] assert "response" not in event["contexts"] diff --git a/tests/integrations/arq/test_arq.py b/tests/integrations/arq/test_arq.py index d8b7e715f2..ce3d624f1e 100644 --- a/tests/integrations/arq/test_arq.py +++ b/tests/integrations/arq/test_arq.py @@ -3,7 +3,7 @@ import pytest -from sentry_sdk import get_client, start_transaction +from sentry_sdk import get_client, start_span from sentry_sdk.integrations.arq import ArqIntegration import arq.worker @@ -294,7 +294,7 @@ async def dummy_job(_): events = capture_events() - with start_transaction() as transaction: + with start_span(name="test") as transaction: await pool.enqueue_job("dummy_job") (event,) = events @@ -345,7 +345,7 @@ async def dummy_job(_): events = capture_events() - with start_transaction(): + with start_span(name="job"): await pool.enqueue_job("dummy_job") (event,) = events diff --git a/tests/integrations/asgi/test_asgi.py b/tests/integrations/asgi/test_asgi.py index ec2796c140..9e97ae3651 100644 --- a/tests/integrations/asgi/test_asgi.py +++ b/tests/integrations/asgi/test_asgi.py @@ -720,3 +720,26 @@ async def test_custom_transaction_name( assert transaction_event["type"] == "transaction" assert transaction_event["transaction"] == "foobar" assert transaction_event["transaction_info"] == {"source": "custom"} + + +@pytest.mark.asyncio +async def test_asgi_scope_in_traces_sampler(sentry_init, asgi3_app): + def dummy_traces_sampler(sampling_context): + assert sampling_context["url.path"] == "/test" + assert sampling_context["url.scheme"] == "http" + assert sampling_context["url.query"] == "hello=there" + assert sampling_context["url.full"] == "/test?hello=there" + assert sampling_context["http.request.method"] == "GET" + assert sampling_context["network.protocol.version"] == "1.1" + assert sampling_context["network.protocol.name"] == "http" + assert sampling_context["http.request.header.custom-header"] == "Custom Value" + + sentry_init( + traces_sampler=dummy_traces_sampler, + traces_sample_rate=1.0, + ) + + app = SentryAsgiMiddleware(asgi3_app) + + async with TestClient(app) as client: + await client.get("/test?hello=there", headers={"Custom-Header": "Custom Value"}) diff --git a/tests/integrations/asyncio/test_asyncio.py b/tests/integrations/asyncio/test_asyncio.py index fb75bfc69b..2ae71f8f43 100644 --- a/tests/integrations/asyncio/test_asyncio.py +++ b/tests/integrations/asyncio/test_asyncio.py @@ -65,7 +65,7 @@ async def test_create_task( events = capture_events() - with sentry_sdk.start_transaction(name="test_transaction_for_create_task"): + with sentry_sdk.start_span(name="test_transaction_for_create_task"): with 
sentry_sdk.start_span(op="root", name="not so important"): tasks = [asyncio.create_task(foo()), asyncio.create_task(bar())] await asyncio.wait(tasks, return_when=asyncio.FIRST_EXCEPTION) @@ -108,7 +108,7 @@ async def test_gather( events = capture_events() - with sentry_sdk.start_transaction(name="test_transaction_for_gather"): + with sentry_sdk.start_span(name="test_transaction_for_gather"): with sentry_sdk.start_span(op="root", name="not so important"): await asyncio.gather(foo(), bar(), return_exceptions=True) @@ -150,7 +150,8 @@ async def test_exception( events = capture_events() - with sentry_sdk.start_transaction(name="test_exception"): + with sentry_sdk.start_span(name="test_exception"): + sentry_sdk.get_isolation_scope().set_transaction_name("test_exception") with sentry_sdk.start_span(op="root", name="not so important"): tasks = [asyncio.create_task(boom()), asyncio.create_task(bar())] await asyncio.wait(tasks, return_when=asyncio.FIRST_EXCEPTION) @@ -364,7 +365,7 @@ async def test_span_origin( events = capture_events() - with sentry_sdk.start_transaction(name="something"): + with sentry_sdk.start_span(name="something"): tasks = [ asyncio.create_task(foo()), ] diff --git a/tests/integrations/asyncpg/test_asyncpg.py b/tests/integrations/asyncpg/test_asyncpg.py index e36d15c5d2..579052da27 100644 --- a/tests/integrations/asyncpg/test_asyncpg.py +++ b/tests/integrations/asyncpg/test_asyncpg.py @@ -10,14 +10,6 @@ """ import os - - -PG_HOST = os.getenv("SENTRY_PYTHON_TEST_POSTGRES_HOST", "localhost") -PG_PORT = int(os.getenv("SENTRY_PYTHON_TEST_POSTGRES_PORT", "5432")) -PG_USER = os.getenv("SENTRY_PYTHON_TEST_POSTGRES_USER", "postgres") -PG_PASSWORD = os.getenv("SENTRY_PYTHON_TEST_POSTGRES_PASSWORD", "sentry") -PG_NAME = os.getenv("SENTRY_PYTHON_TEST_POSTGRES_NAME", "postgres") - import datetime from contextlib import contextmanager from unittest import mock @@ -26,17 +18,25 @@ import pytest import pytest_asyncio from asyncpg import connect, Connection +from freezegun import freeze_time -from sentry_sdk import capture_message, start_transaction +from sentry_sdk import capture_message, start_span from sentry_sdk.integrations.asyncpg import AsyncPGIntegration from sentry_sdk.consts import SPANDATA from sentry_sdk.tracing_utils import record_sql_queries from tests.conftest import ApproxDict +PG_HOST = os.getenv("SENTRY_PYTHON_TEST_POSTGRES_HOST", "localhost") +PG_PORT = int(os.getenv("SENTRY_PYTHON_TEST_POSTGRES_PORT", "5432")) +PG_USER = os.getenv("SENTRY_PYTHON_TEST_POSTGRES_USER", "postgres") +PG_PASSWORD = os.getenv("SENTRY_PYTHON_TEST_POSTGRES_PASSWORD", "sentry") +PG_NAME = os.getenv("SENTRY_PYTHON_TEST_POSTGRES_NAME", "postgres") + PG_CONNECTION_URI = "postgresql://{}:{}@{}/{}".format( PG_USER, PG_PASSWORD, PG_HOST, PG_NAME ) + CRUMBS_CONNECT = { "category": "query", "data": ApproxDict( @@ -84,7 +84,7 @@ async def test_connect(sentry_init, capture_events) -> None: capture_message("hi") - (event,) = events + event = events[-1] for crumb in event["breadcrumbs"]["values"]: del crumb["timestamp"] @@ -123,7 +123,7 @@ async def test_execute(sentry_init, capture_events) -> None: capture_message("hi") - (event,) = events + event = events[-1] for crumb in event["breadcrumbs"]["values"]: del crumb["timestamp"] @@ -179,7 +179,7 @@ async def test_execute_many(sentry_init, capture_events) -> None: capture_message("hi") - (event,) = events + event = events[-1] for crumb in event["breadcrumbs"]["values"]: del crumb["timestamp"] @@ -216,7 +216,7 @@ async def test_record_params(sentry_init, 
capture_events) -> None: capture_message("hi") - (event,) = events + event = events[-1] for crumb in event["breadcrumbs"]["values"]: del crumb["timestamp"] @@ -259,13 +259,13 @@ async def test_cursor(sentry_init, capture_events) -> None: async for record in conn.cursor( "SELECT * FROM users WHERE dob > $1", datetime.date(1970, 1, 1) ): - print(record) + pass await conn.close() capture_message("hi") - (event,) = events + event = events[-1] for crumb in event["breadcrumbs"]["values"]: del crumb["timestamp"] @@ -278,14 +278,24 @@ async def test_cursor(sentry_init, capture_events) -> None: "message": "INSERT INTO users(name, password, dob) VALUES($1, $2, $3)", "type": "default", }, - {"category": "query", "data": {}, "message": "BEGIN;", "type": "default"}, + { + "category": "query", + "data": {}, + "message": "BEGIN;", + "type": "default", + }, { "category": "query", "data": {}, "message": "SELECT * FROM users WHERE dob > $1", "type": "default", }, - {"category": "query", "data": {}, "message": "COMMIT;", "type": "default"}, + { + "category": "query", + "data": {}, + "message": "COMMIT;", + "type": "default", + }, ] @@ -306,24 +316,22 @@ async def test_cursor_manual(sentry_init, capture_events) -> None: ("Alice", "pw", datetime.date(1990, 12, 25)), ], ) - # + async with conn.transaction(): # Postgres requires non-scrollable cursors to be created # and used in a transaction. cur = await conn.cursor( "SELECT * FROM users WHERE dob > $1", datetime.date(1970, 1, 1) ) - record = await cur.fetchrow() - print(record) + await cur.fetchrow() while await cur.forward(1): - record = await cur.fetchrow() - print(record) + await cur.fetchrow() await conn.close() capture_message("hi") - (event,) = events + event = events[-1] for crumb in event["breadcrumbs"]["values"]: del crumb["timestamp"] @@ -336,14 +344,24 @@ async def test_cursor_manual(sentry_init, capture_events) -> None: "message": "INSERT INTO users(name, password, dob) VALUES($1, $2, $3)", "type": "default", }, - {"category": "query", "data": {}, "message": "BEGIN;", "type": "default"}, + { + "category": "query", + "data": {}, + "message": "BEGIN;", + "type": "default", + }, { "category": "query", "data": {}, "message": "SELECT * FROM users WHERE dob > $1", "type": "default", }, - {"category": "query", "data": {}, "message": "COMMIT;", "type": "default"}, + { + "category": "query", + "data": {}, + "message": "COMMIT;", + "type": "default", + }, ] @@ -367,14 +385,14 @@ async def test_prepared_stmt(sentry_init, capture_events) -> None: stmt = await conn.prepare("SELECT * FROM users WHERE name = $1") - print(await stmt.fetchval("Bob")) - print(await stmt.fetchval("Alice")) + await stmt.fetchval("Bob") + await stmt.fetchval("Alice") await conn.close() capture_message("hi") - (event,) = events + event = events[-1] for crumb in event["breadcrumbs"]["values"]: del crumb["timestamp"] @@ -426,7 +444,7 @@ async def test_connection_pool(sentry_init, capture_events) -> None: capture_message("hi") - (event,) = events + event = events[-1] for crumb in event["breadcrumbs"]["values"]: del crumb["timestamp"] @@ -471,7 +489,7 @@ async def test_connection_pool(sentry_init, capture_events) -> None: async def test_query_source_disabled(sentry_init, capture_events): sentry_options = { "integrations": [AsyncPGIntegration()], - "enable_tracing": True, + "traces_sample_rate": 1.0, "enable_db_query_source": False, "db_query_source_threshold_ms": 0, } @@ -480,7 +498,7 @@ async def test_query_source_disabled(sentry_init, capture_events): events = capture_events() - with 
start_transaction(name="test_transaction", sampled=True): + with start_span(name="test_span"): conn: Connection = await connect(PG_CONNECTION_URI) await conn.execute( @@ -509,7 +527,7 @@ async def test_query_source_enabled( ): sentry_options = { "integrations": [AsyncPGIntegration()], - "enable_tracing": True, + "traces_sample_rate": 1.0, "db_query_source_threshold_ms": 0, } if enable_db_query_source is not None: @@ -519,7 +537,7 @@ async def test_query_source_enabled( events = capture_events() - with start_transaction(name="test_transaction", sampled=True): + with start_span(name="test_span"): conn: Connection = await connect(PG_CONNECTION_URI) await conn.execute( @@ -545,14 +563,14 @@ async def test_query_source_enabled( async def test_query_source(sentry_init, capture_events): sentry_init( integrations=[AsyncPGIntegration()], - enable_tracing=True, + traces_sample_rate=1.0, enable_db_query_source=True, db_query_source_threshold_ms=0, ) events = capture_events() - with start_transaction(name="test_transaction", sampled=True): + with start_span(name="test_span"): conn: Connection = await connect(PG_CONNECTION_URI) await conn.execute( @@ -595,7 +613,7 @@ async def test_query_source_with_module_in_search_path(sentry_init, capture_even """ sentry_init( integrations=[AsyncPGIntegration()], - enable_tracing=True, + traces_sample_rate=1.0, enable_db_query_source=True, db_query_source_threshold_ms=0, ) @@ -604,7 +622,7 @@ async def test_query_source_with_module_in_search_path(sentry_init, capture_even from asyncpg_helpers.helpers import execute_query_in_connection - with start_transaction(name="test_transaction", sampled=True): + with start_span(name="test_span"): conn: Connection = await connect(PG_CONNECTION_URI) await execute_query_in_connection( @@ -641,31 +659,33 @@ async def test_query_source_with_module_in_search_path(sentry_init, capture_even async def test_no_query_source_if_duration_too_short(sentry_init, capture_events): sentry_init( integrations=[AsyncPGIntegration()], - enable_tracing=True, + traces_sample_rate=1.0, enable_db_query_source=True, db_query_source_threshold_ms=100, ) events = capture_events() - with start_transaction(name="test_transaction", sampled=True): + with start_span(name="test_span"): conn: Connection = await connect(PG_CONNECTION_URI) @contextmanager def fake_record_sql_queries(*args, **kwargs): - with record_sql_queries(*args, **kwargs) as span: - pass - span.start_timestamp = datetime.datetime(2024, 1, 1, microsecond=0) - span.timestamp = datetime.datetime(2024, 1, 1, microsecond=99999) - yield span + with freeze_time(datetime.datetime(2024, 1, 1, microsecond=99999)): + with record_sql_queries(*args, **kwargs) as span: + yield span with mock.patch( - "sentry_sdk.integrations.asyncpg.record_sql_queries", - fake_record_sql_queries, + "sentry_sdk.tracing.Span.start_timestamp", + datetime.datetime(2024, 1, 1, microsecond=0, tzinfo=datetime.timezone.utc), ): - await conn.execute( - "INSERT INTO users(name, password, dob) VALUES ('Alice', 'secret', '1990-12-25')", - ) + with mock.patch( + "sentry_sdk.integrations.asyncpg.record_sql_queries", + fake_record_sql_queries, + ): + await conn.execute( + "INSERT INTO users(name, password, dob) VALUES ('Alice', 'secret', '1990-12-25')", + ) await conn.close() @@ -686,31 +706,33 @@ def fake_record_sql_queries(*args, **kwargs): async def test_query_source_if_duration_over_threshold(sentry_init, capture_events): sentry_init( integrations=[AsyncPGIntegration()], - enable_tracing=True, + traces_sample_rate=1.0, 
enable_db_query_source=True, db_query_source_threshold_ms=100, ) events = capture_events() - with start_transaction(name="test_transaction", sampled=True): + with start_span(name="test_span"): conn: Connection = await connect(PG_CONNECTION_URI) @contextmanager def fake_record_sql_queries(*args, **kwargs): - with record_sql_queries(*args, **kwargs) as span: - pass - span.start_timestamp = datetime.datetime(2024, 1, 1, microsecond=0) - span.timestamp = datetime.datetime(2024, 1, 1, microsecond=100001) - yield span + with freeze_time(datetime.datetime(2024, 1, 1, microsecond=100001)): + with record_sql_queries(*args, **kwargs) as span: + yield span with mock.patch( - "sentry_sdk.integrations.asyncpg.record_sql_queries", - fake_record_sql_queries, + "sentry_sdk.tracing.Span.start_timestamp", + datetime.datetime(2024, 1, 1, microsecond=0, tzinfo=datetime.timezone.utc), ): - await conn.execute( - "INSERT INTO users(name, password, dob) VALUES ('Alice', 'secret', '1990-12-25')", - ) + with mock.patch( + "sentry_sdk.integrations.asyncpg.record_sql_queries", + fake_record_sql_queries, + ): + await conn.execute( + "INSERT INTO users(name, password, dob) VALUES ('Alice', 'secret', '1990-12-25')", + ) await conn.close() @@ -753,7 +775,7 @@ async def test_span_origin(sentry_init, capture_events): events = capture_events() - with start_transaction(name="test_transaction"): + with start_span(name="test_span"): conn: Connection = await connect(PG_CONNECTION_URI) await conn.execute("SELECT 1") diff --git a/tests/integrations/aws_lambda/lambda_functions_with_embedded_sdk/TracesSampler/index.py b/tests/integrations/aws_lambda/lambda_functions_with_embedded_sdk/TracesSampler/index.py index ce797faf71..bc2693d9b5 100644 --- a/tests/integrations/aws_lambda/lambda_functions_with_embedded_sdk/TracesSampler/index.py +++ b/tests/integrations/aws_lambda/lambda_functions_with_embedded_sdk/TracesSampler/index.py @@ -4,26 +4,14 @@ from sentry_sdk.integrations.aws_lambda import AwsLambdaIntegration # Global variables to store sampling context for verification -sampling_context_data = { - "aws_event_present": False, - "aws_context_present": False, - "event_data": None, -} +sampling_context_data = None def trace_sampler(sampling_context): # Store the sampling context for verification global sampling_context_data + sampling_context_data = sampling_context - # Check if aws_event and aws_context are in the sampling_context - if "aws_event" in sampling_context: - sampling_context_data["aws_event_present"] = True - sampling_context_data["event_data"] = sampling_context["aws_event"] - - if "aws_context" in sampling_context: - sampling_context_data["aws_context_present"] = True - - print("Sampling context data:", sampling_context_data) return 1.0 # Always sample diff --git a/tests/integrations/aws_lambda/test_aws_lambda.py b/tests/integrations/aws_lambda/test_aws_lambda.py index 85da7e0b14..5f608fcc5a 100644 --- a/tests/integrations/aws_lambda/test_aws_lambda.py +++ b/tests/integrations/aws_lambda/test_aws_lambda.py @@ -67,7 +67,7 @@ def test_environment(): try: # Wait for SAM to be ready - LocalLambdaStack.wait_for_stack() + LocalLambdaStack.wait_for_stack(log_file=debug_log_file) def before_test(): server.clear_envelopes() @@ -137,12 +137,12 @@ def test_basic_no_exception(lambda_client, test_environment): } assert transaction_event["contexts"]["trace"] == { "op": "function.aws", - "description": mock.ANY, "span_id": mock.ANY, "parent_span_id": mock.ANY, "trace_id": mock.ANY, "origin": "auto.function.aws_lambda", "data": 
mock.ANY, + "status": "ok", } @@ -178,7 +178,6 @@ def test_basic_exception(lambda_client, test_environment): } assert error_event["contexts"]["trace"] == { "op": "function.aws", - "description": mock.ANY, "span_id": mock.ANY, "parent_span_id": mock.ANY, "trace_id": mock.ANY, @@ -314,9 +313,7 @@ def test_non_dict_event( "headers": {"Host": "x1.io", "X-Forwarded-Proto": "https"}, "method": "GET", "url": "https://x1.io/1", - "query_string": { - "done": "f", - }, + "query_string": "done=f", } else: request_data = {"url": "awslambda:///BasicException"} @@ -343,7 +340,8 @@ def test_request_data(lambda_client, test_environment): "X-Forwarded-Proto": "https" }, "queryStringParameters": { - "bonkers": "true" + "bonkers": "true", + "wild": "false" }, "pathParameters": null, "stageVariables": null, @@ -373,7 +371,7 @@ def test_request_data(lambda_client, test_environment): "X-Forwarded-Proto": "https", }, "method": "GET", - "query_string": {"bonkers": "true"}, + "query_string": "bonkers=true&wild=false", "url": "https://iwsz2c7uwi.execute-api.us-east-1.amazonaws.com/asd", } @@ -457,7 +455,19 @@ def test_traces_sampler_has_correct_sampling_context(lambda_client, test_environ Test that aws_event and aws_context are passed in the custom_sampling_context when using the AWS Lambda integration. """ - test_payload = {"test_key": "test_value"} + test_payload = { + "test_key": "test_value", + "httpMethod": "GET", + "queryStringParameters": { + "test_query_param": "test_query_value", + }, + "path": "/test", + "headers": { + "X-Forwarded-Proto": "https", + "Host": "example.com", + "X-Bla": "blabla", + }, + } response = lambda_client.invoke( FunctionName="TracesSampler", Payload=json.dumps(test_payload), @@ -466,9 +476,28 @@ def test_traces_sampler_has_correct_sampling_context(lambda_client, test_environ sampling_context_data = json.loads(response_payload["body"])[ "sampling_context_data" ] - assert sampling_context_data.get("aws_event_present") is True - assert sampling_context_data.get("aws_context_present") is True - assert sampling_context_data.get("event_data", {}).get("test_key") == "test_value" + + assert sampling_context_data == { + "transaction_context": { + "name": "TracesSampler", + "op": "function.aws", + "source": "component", + }, + "http.request.method": "GET", + "url.query": "test_query_param=test_query_value", + "url.path": "/test", + "url.full": "https://example.com/test?test_query_param=test_query_value", + "network.protocol.name": "https", + "server.address": "example.com", + "faas.name": "TracesSampler", + "http.request.header.x-forwarded-proto": "https", + "http.request.header.host": "example.com", + "http.request.header.x-bla": "blabla", + "sentry.op": "function.aws", + "sentry.source": "component", + "parent_sampled": None, + "cloud.provider": "aws", + } @pytest.mark.parametrize( diff --git a/tests/integrations/aws_lambda/utils.py b/tests/integrations/aws_lambda/utils.py index d20c9352e7..3d590390ae 100644 --- a/tests/integrations/aws_lambda/utils.py +++ b/tests/integrations/aws_lambda/utils.py @@ -211,7 +211,7 @@ def __init__(self, scope: Construct, construct_id: str, **kwargs) -> None: ) @classmethod - def wait_for_stack(cls, timeout=60, port=SAM_PORT): + def wait_for_stack(cls, timeout=60, port=SAM_PORT, log_file=None): """ Wait for SAM to be ready, with timeout. """ @@ -219,8 +219,8 @@ def wait_for_stack(cls, timeout=60, port=SAM_PORT): while True: if time.time() - start_time > timeout: raise TimeoutError( - "AWS SAM failed to start within %s seconds. 
(Maybe Docker is not running?)" - % timeout + "AWS SAM failed to start within %s seconds. (Maybe Docker is not running, or new docker images could not be built in time?) Check the log for more details: %s" + % (timeout, log_file) ) try: diff --git a/tests/integrations/boto3/test_s3.py b/tests/integrations/boto3/test_s3.py index 97a1543b0f..71dc5ccc07 100644 --- a/tests/integrations/boto3/test_s3.py +++ b/tests/integrations/boto3/test_s3.py @@ -21,7 +21,7 @@ def test_basic(sentry_init, capture_events): events = capture_events() s3 = session.resource("s3") - with sentry_sdk.start_transaction() as transaction, MockResponse( + with sentry_sdk.start_span() as transaction, MockResponse( s3.meta.client, 200, {}, read_fixture("s3_list.xml") ): bucket = s3.Bucket("bucket") @@ -39,12 +39,43 @@ def test_basic(sentry_init, capture_events): assert span["description"] == "aws.s3.ListObjects" +def test_breadcrumb(sentry_init, capture_events): + sentry_init(traces_sample_rate=1.0, integrations=[Boto3Integration()]) + events = capture_events() + + try: + s3 = session.resource("s3") + with sentry_sdk.start_span(), MockResponse( + s3.meta.client, 200, {}, read_fixture("s3_list.xml") + ): + bucket = s3.Bucket("bucket") + # read bucket (this makes http request) + [obj for obj in bucket.objects.all()] + 1 / 0 + except Exception as e: + sentry_sdk.capture_exception(e) + + (_, event) = events + crumb = event["breadcrumbs"]["values"][0] + assert crumb == { + "type": "http", + "category": "httplib", + "data": { + "http.method": "GET", + "aws.request.url": "https://bucket.s3.amazonaws.com/", + "http.query": "encoding-type=url", + "http.fragment": "", + }, + "timestamp": mock.ANY, + } + + def test_streaming(sentry_init, capture_events): sentry_init(traces_sample_rate=1.0, integrations=[Boto3Integration()]) events = capture_events() s3 = session.resource("s3") - with sentry_sdk.start_transaction() as transaction, MockResponse( + with sentry_sdk.start_span() as transaction, MockResponse( s3.meta.client, 200, {}, b"hello" ): obj = s3.Bucket("bucket").Object("foo.pdf") @@ -82,7 +113,7 @@ def test_streaming_close(sentry_init, capture_events): events = capture_events() s3 = session.resource("s3") - with sentry_sdk.start_transaction() as transaction, MockResponse( + with sentry_sdk.start_span() as transaction, MockResponse( s3.meta.client, 200, {}, b"hello" ): obj = s3.Bucket("bucket").Object("foo.pdf") @@ -111,7 +142,7 @@ def test_omit_url_data_if_parsing_fails(sentry_init, capture_events): "sentry_sdk.integrations.boto3.parse_url", side_effect=ValueError, ): - with sentry_sdk.start_transaction() as transaction, MockResponse( + with sentry_sdk.start_span() as transaction, MockResponse( s3.meta.client, 200, {}, read_fixture("s3_list.xml") ): bucket = s3.Bucket("bucket") @@ -139,7 +170,7 @@ def test_span_origin(sentry_init, capture_events): events = capture_events() s3 = session.resource("s3") - with sentry_sdk.start_transaction(), MockResponse( + with sentry_sdk.start_span(), MockResponse( s3.meta.client, 200, {}, read_fixture("s3_list.xml") ): bucket = s3.Bucket("bucket") diff --git a/tests/integrations/celery/test_celery.py b/tests/integrations/celery/test_celery.py index 8c794bd5ff..821a3bd10e 100644 --- a/tests/integrations/celery/test_celery.py +++ b/tests/integrations/celery/test_celery.py @@ -5,9 +5,11 @@ import pytest from celery import Celery, VERSION from celery.bin import worker +from celery.app.task import Task +from opentelemetry import trace as otel_trace, context import sentry_sdk -from sentry_sdk import 
start_transaction, get_current_span +from sentry_sdk import get_current_span from sentry_sdk.integrations.celery import ( CeleryIntegration, _wrap_task_run, @@ -126,14 +128,14 @@ def dummy_task(x, y): foo = 42 # noqa return x / y - with start_transaction(op="unit test transaction") as transaction: + with sentry_sdk.start_span(op="unit test transaction") as root_span: celery_invocation(dummy_task, 1, 2) _, expected_context = celery_invocation(dummy_task, 1, 0) (_, error_event, _, _) = events - assert error_event["contexts"]["trace"]["trace_id"] == transaction.trace_id - assert error_event["contexts"]["trace"]["span_id"] != transaction.span_id + assert error_event["contexts"]["trace"]["trace_id"] == root_span.trace_id + assert error_event["contexts"]["trace"]["span_id"] != root_span.span_id assert error_event["transaction"] == "dummy_task" assert "celery_task_id" in error_event["tags"] assert error_event["extra"]["celery-job"] == dict( @@ -190,17 +192,14 @@ def test_transaction_events(capture_events, init_celery, celery_invocation, task def dummy_task(x, y): return x / y - # XXX: For some reason the first call does not get instrumented properly. - celery_invocation(dummy_task, 1, 1) - events = capture_events() - with start_transaction(name="submission") as transaction: + with sentry_sdk.start_span(name="submission") as root_span: celery_invocation(dummy_task, 1, 0 if task_fails else 1) if task_fails: error_event = events.pop(0) - assert error_event["contexts"]["trace"]["trace_id"] == transaction.trace_id + assert error_event["contexts"]["trace"]["trace_id"] == root_span.trace_id assert error_event["exception"]["values"][0]["type"] == "ZeroDivisionError" execution_event, submission_event = events @@ -211,8 +210,8 @@ def dummy_task(x, y): assert submission_event["transaction_info"] == {"source": "custom"} assert execution_event["type"] == submission_event["type"] == "transaction" - assert execution_event["contexts"]["trace"]["trace_id"] == transaction.trace_id - assert submission_event["contexts"]["trace"]["trace_id"] == transaction.trace_id + assert execution_event["contexts"]["trace"]["trace_id"] == root_span.trace_id + assert submission_event["contexts"]["trace"]["trace_id"] == root_span.trace_id if task_fails: assert execution_event["contexts"]["trace"]["status"] == "internal_error" @@ -220,28 +219,32 @@ def dummy_task(x, y): assert execution_event["contexts"]["trace"]["status"] == "ok" assert len(execution_event["spans"]) == 1 - assert ( - execution_event["spans"][0].items() - >= { - "trace_id": str(transaction.trace_id), - "same_process_as_parent": True, + assert execution_event["spans"][0] == ApproxDict( + { + "trace_id": str(root_span.trace_id), "op": "queue.process", "description": "dummy_task", - "data": ApproxDict(), - }.items() + } ) assert submission_event["spans"] == [ { - "data": ApproxDict(), + "data": { + "sentry.name": "dummy_task", + "sentry.op": "queue.submit.celery", + "sentry.origin": "auto.queue.celery", + "sentry.source": "custom", + "thread.id": mock.ANY, + "thread.name": mock.ANY, + }, "description": "dummy_task", "op": "queue.submit.celery", "origin": "auto.queue.celery", "parent_span_id": submission_event["contexts"]["trace"]["span_id"], - "same_process_as_parent": True, "span_id": submission_event["spans"][0]["span_id"], "start_timestamp": submission_event["spans"][0]["start_timestamp"], "timestamp": submission_event["spans"][0]["timestamp"], - "trace_id": str(transaction.trace_id), + "trace_id": str(root_span.trace_id), + "status": "ok", } ] @@ -275,11 +278,11 @@ 
def test_simple_no_propagation(capture_events, init_celery): def dummy_task(): 1 / 0 - with start_transaction() as transaction: + with sentry_sdk.start_span(name="task") as root_span: dummy_task.delay() (event,) = events - assert event["contexts"]["trace"]["trace_id"] != transaction.trace_id + assert event["contexts"]["trace"]["trace_id"] != root_span.trace_id assert event["transaction"] == "dummy_task" (exception,) = event["exception"]["values"] assert exception["type"] == "ZeroDivisionError" @@ -350,7 +353,7 @@ def dummy_task(self): runs.append(1) 1 / 0 - with start_transaction(name="submit_celery"): + with sentry_sdk.start_span(name="submit_celery"): # Curious: Cannot use delay() here or py2.7-celery-4.2 crashes res = dummy_task.apply_async() @@ -430,7 +433,7 @@ def dummy_task(self, x, y): def test_traces_sampler_gets_task_info_in_sampling_context( - init_celery, celery_invocation, DictionaryContaining # noqa:N803 + init_celery, celery_invocation ): traces_sampler = mock.Mock() celery = init_celery(traces_sampler=traces_sampler) @@ -445,11 +448,12 @@ def walk_dogs(x, y): walk_dogs, [["Maisey", "Charlie", "Bodhi", "Cory"], "Dog park round trip"], 1 ) - traces_sampler.assert_any_call( - # depending on the iteration of celery_invocation, the data might be - # passed as args or as kwargs, so make this generic - DictionaryContaining({"celery_job": dict(task="dog_walk", **args_kwargs)}) - ) + sampling_context = traces_sampler.call_args_list[0][0][0] + assert sampling_context["celery.job.task"] == "dog_walk" + for i, arg in enumerate(args_kwargs["args"]): + assert sampling_context[f"celery.job.args.{i}"] == str(arg) + for kwarg, value in args_kwargs["kwargs"].items(): + assert sampling_context[f"celery.job.kwargs.{kwarg}"] == str(value) def test_abstract_task(capture_events, celery, celery_invocation): @@ -468,7 +472,7 @@ def __call__(self, *args, **kwargs): def dummy_task(x, y): return x / y - with start_transaction(): + with sentry_sdk.start_span(name="celery"): celery_invocation(dummy_task, 1, 0) assert not events @@ -509,9 +513,8 @@ def test_baggage_propagation(init_celery): def dummy_task(self, x, y): return _get_headers(self) - # patch random.uniform to return a predictable sample_rand value with mock.patch("sentry_sdk.tracing_utils.Random.uniform", return_value=0.5): - with start_transaction() as transaction: + with sentry_sdk.start_span(name="task") as root_span: result = dummy_task.apply_async( args=(1, 0), headers={"baggage": "custom=value"}, @@ -520,7 +523,8 @@ def dummy_task(self, x, y): assert sorted(result["baggage"].split(",")) == sorted( [ "sentry-release=abcdef", - "sentry-trace_id={}".format(transaction.trace_id), + "sentry-trace_id={}".format(root_span.trace_id), + "sentry-transaction=task", "sentry-environment=production", "sentry-sample_rand=0.500000", "sentry-sample_rate=1.0", @@ -539,26 +543,42 @@ def test_sentry_propagate_traces_override(init_celery): propagate_traces=True, traces_sample_rate=1.0, release="abcdef" ) + # Since we're applying the task inline eagerly, + # we need to clean up the OTel context for this test. + # And since we patch build_tracer, we need to do this before that runs...
+ # TODO: the right way is to not test this inline + original_apply = Task.apply + + def cleaned_apply(*args, **kwargs): + token = context.attach(otel_trace.set_span_in_context(otel_trace.INVALID_SPAN)) + rv = original_apply(*args, **kwargs) + context.detach(token) + return rv + + Task.apply = cleaned_apply + @celery.task(name="dummy_task", bind=True) def dummy_task(self, message): trace_id = get_current_span().trace_id return trace_id - with start_transaction() as transaction: - transaction_trace_id = transaction.trace_id + with sentry_sdk.start_span(name="task") as root_span: + root_span_trace_id = root_span.trace_id # should propagate trace - task_transaction_id = dummy_task.apply_async( + task_trace_id = dummy_task.apply_async( args=("some message",), ).get() - assert transaction_trace_id == task_transaction_id + assert root_span_trace_id == task_trace_id, "Trace should be propagated" # should NOT propagate trace (overrides `propagate_traces` parameter in integration constructor) - task_transaction_id = dummy_task.apply_async( + task_trace_id = dummy_task.apply_async( args=("another message",), headers={"sentry-propagate-traces": False}, ).get() - assert transaction_trace_id != task_transaction_id + assert root_span_trace_id != task_trace_id, "Trace should NOT be propagated" + + Task.apply = original_apply def test_apply_async_manually_span(sentry_init): @@ -595,7 +615,7 @@ def example_task(): def test_messaging_destination_name_default_exchange( mock_request, routing_key, init_celery, capture_events ): - celery_app = init_celery(enable_tracing=True) + celery_app = init_celery(traces_sample_rate=1.0) events = capture_events() mock_request.delivery_info = {"routing_key": routing_key, "exchange": ""} @@ -619,7 +639,7 @@ def test_messaging_destination_name_nondefault_exchange( that the routing key is the queue name. Other exchanges may not guarantee this behavior. """ - celery_app = init_celery(enable_tracing=True) + celery_app = init_celery(traces_sample_rate=1.0) events = capture_events() mock_request.delivery_info = {"routing_key": "celery", "exchange": "custom"} @@ -634,7 +654,7 @@ def task(): ... def test_messaging_id(init_celery, capture_events): - celery = init_celery(enable_tracing=True) + celery = init_celery(traces_sample_rate=1.0) events = capture_events() @celery.task def example_task(): ... @@ -648,7 +668,7 @@ def test_retry_count_zero(init_celery, capture_events): - celery = init_celery(enable_tracing=True) + celery = init_celery(traces_sample_rate=1.0) events = capture_events() @celery.task() def task(): ... @@ -665,7 +685,7 @@ def test_retry_count_nonzero(mock_request, init_celery, capture_events): mock_request.retries = 3 - celery = init_celery(enable_tracing=True) + celery = init_celery(traces_sample_rate=1.0) events = capture_events() @celery.task() def task(): ... @@ -680,7 +700,7 @@ @pytest.mark.parametrize("system", ("redis", "amqp")) def test_messaging_system(system, init_celery, capture_events): - celery = init_celery(enable_tracing=True) + celery = init_celery(traces_sample_rate=1.0) events = capture_events() # Does not need to be a real URL, since we use always eager @@ -705,14 +725,14 @@ def publish(*args, **kwargs): monkeypatch.setattr(kombu.messaging.Producer, "_publish", publish) - sentry_init(integrations=[CeleryIntegration()], enable_tracing=True) + sentry_init(integrations=[CeleryIntegration()], traces_sample_rate=1.0) celery = Celery(__name__, broker=f"{system}://example.com") # noqa: E231 events = capture_events() @celery.task() def task(): ...
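The cleaned_apply wrapper above attaches an OpenTelemetry context whose active span is INVALID_SPAN, so the eagerly-applied task starts a fresh trace, then detaches again. The same pattern as a standalone sketch (the try/finally is a safety addition of this sketch; the test code above instead restores Task.apply at the end):

```python
from opentelemetry import context
from opentelemetry import trace as otel_trace

def run_without_active_span(fn, *args, **kwargs):
    # Make INVALID_SPAN the "current" span so fn sees no active trace,
    # then restore the previous context even if fn raises.
    token = context.attach(otel_trace.set_span_in_context(otel_trace.INVALID_SPAN))
    try:
        return fn(*args, **kwargs)
    finally:
        context.detach(token)
```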
- with start_transaction(): + with sentry_sdk.start_span(name="task"): task.apply_async() (event,) = events @@ -743,7 +763,7 @@ def task(): ... def tests_span_origin_consumer(init_celery, capture_events): - celery = init_celery(enable_tracing=True) + celery = init_celery(traces_sample_rate=1.0) celery.conf.broker_url = "redis://example.com" # noqa: E231 events = capture_events() @@ -767,7 +787,7 @@ def publish(*args, **kwargs): monkeypatch.setattr(kombu.messaging.Producer, "_publish", publish) - sentry_init(integrations=[CeleryIntegration()], enable_tracing=True) + sentry_init(integrations=[CeleryIntegration()], traces_sample_rate=1.0) celery = Celery(__name__, broker="redis://example.com") # noqa: E231 events = capture_events() @@ -775,7 +795,7 @@ def publish(*args, **kwargs): @celery.task() def task(): ... - with start_transaction(name="custom_transaction"): + with sentry_sdk.start_span(name="custom_transaction"): task.apply_async() (event,) = events @@ -796,12 +816,12 @@ def test_send_task_wrapped( capture_events, reset_integrations, ): - sentry_init(integrations=[CeleryIntegration()], enable_tracing=True) + sentry_init(integrations=[CeleryIntegration()], traces_sample_rate=1.0) celery = Celery(__name__, broker="redis://example.com") # noqa: E231 events = capture_events() - with sentry_sdk.start_transaction(name="custom_transaction"): + with sentry_sdk.start_span(name="custom_transaction"): celery.send_task("very_creative_task_name", args=(1, 2), kwargs={"foo": "bar"}) (call,) = patched_send_task.call_args_list # We should have exactly one call diff --git a/tests/integrations/celery/test_update_celery_task_headers.py b/tests/integrations/celery/test_update_celery_task_headers.py index 705c00de58..5b76bee076 100644 --- a/tests/integrations/celery/test_update_celery_task_headers.py +++ b/tests/integrations/celery/test_update_celery_task_headers.py @@ -7,6 +7,7 @@ from sentry_sdk.integrations.celery import _update_celery_task_headers import sentry_sdk from sentry_sdk.tracing_utils import Baggage +from tests.conftest import SortedBaggage BAGGAGE_VALUE = ( @@ -71,11 +72,11 @@ def test_monitor_beat_tasks_with_headers(monitor_beat_tasks): def test_span_with_transaction(sentry_init): - sentry_init(enable_tracing=True) + sentry_init(traces_sample_rate=1.0) headers = {} monitor_beat_tasks = False - with sentry_sdk.start_transaction(name="test_transaction") as transaction: + with sentry_sdk.start_span(name="test_transaction") as transaction: with sentry_sdk.start_span(op="test_span") as span: outgoing_headers = _update_celery_task_headers( headers, span, monitor_beat_tasks @@ -83,21 +84,22 @@ def test_span_with_transaction(sentry_init): assert outgoing_headers["sentry-trace"] == span.to_traceparent() assert outgoing_headers["headers"]["sentry-trace"] == span.to_traceparent() - assert outgoing_headers["baggage"] == transaction.get_baggage().serialize() - assert ( - outgoing_headers["headers"]["baggage"] - == transaction.get_baggage().serialize() + assert outgoing_headers["baggage"] == SortedBaggage( + transaction.get_baggage().serialize() + ) + assert outgoing_headers["headers"]["baggage"] == SortedBaggage( + transaction.get_baggage().serialize() ) def test_span_with_transaction_custom_headers(sentry_init): - sentry_init(enable_tracing=True) + sentry_init(traces_sample_rate=1.0) headers = { "baggage": BAGGAGE_VALUE, "sentry-trace": SENTRY_TRACE_VALUE, } - with sentry_sdk.start_transaction(name="test_transaction") as transaction: + with sentry_sdk.start_span(name="test_transaction") as transaction: 
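The header tests around here compare baggage through the SortedBaggage helper imported from tests.conftest, since the serialization order of baggage entries is not guaranteed. A rough stand-in for the order-insensitive comparison such a helper has to perform (the real implementation may differ):

```python
def baggage_entries(header):
    # Split a baggage header into its key=value members, ignoring
    # ordering and surrounding whitespace.
    return {entry.strip() for entry in header.split(",") if entry.strip()}

a = "sentry-release=abcdef,sentry-environment=production"
b = "sentry-environment=production, sentry-release=abcdef"
assert baggage_entries(a) == baggage_entries(b)
```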
with sentry_sdk.start_span(op="test_span") as span: outgoing_headers = _update_celery_task_headers(headers, span, False) @@ -117,11 +119,11 @@ def test_span_with_transaction_custom_headers(sentry_init): if x is not None and x != "" ] ) - assert outgoing_headers["baggage"] == combined_baggage.serialize( - include_third_party=True + assert outgoing_headers["baggage"] == SortedBaggage( + combined_baggage.serialize(include_third_party=True) ) - assert outgoing_headers["headers"]["baggage"] == combined_baggage.serialize( - include_third_party=True + assert outgoing_headers["headers"]["baggage"] == SortedBaggage( + combined_baggage.serialize(include_third_party=True) ) @@ -190,39 +192,3 @@ def test_celery_trace_propagation_traces_sample_rate( else: assert "sentry-monitor-start-timestamp-s" not in outgoing_headers assert "sentry-monitor-start-timestamp-s" not in outgoing_headers["headers"] - - -@pytest.mark.parametrize( - "enable_tracing,monitor_beat_tasks", - list(itertools.product([None, True, False], [True, False])), -) -def test_celery_trace_propagation_enable_tracing( - sentry_init, enable_tracing, monitor_beat_tasks -): - """ - The celery integration does not check the traces_sample_rate. - By default traces_sample_rate is None which means "do not propagate traces". - But the celery integration does not check this value. - The Celery integration has its own mechanism to propagate traces: - https://docs.sentry.io/platforms/python/integrations/celery/#distributed-traces - """ - sentry_init(enable_tracing=enable_tracing) - - headers = {} - span = None - - scope = sentry_sdk.get_isolation_scope() - - outgoing_headers = _update_celery_task_headers(headers, span, monitor_beat_tasks) - - assert outgoing_headers["sentry-trace"] == scope.get_traceparent() - assert outgoing_headers["headers"]["sentry-trace"] == scope.get_traceparent() - assert outgoing_headers["baggage"] == scope.get_baggage().serialize() - assert outgoing_headers["headers"]["baggage"] == scope.get_baggage().serialize() - - if monitor_beat_tasks: - assert "sentry-monitor-start-timestamp-s" in outgoing_headers - assert "sentry-monitor-start-timestamp-s" in outgoing_headers["headers"] - else: - assert "sentry-monitor-start-timestamp-s" not in outgoing_headers - assert "sentry-monitor-start-timestamp-s" not in outgoing_headers["headers"] diff --git a/tests/integrations/clickhouse_driver/test_clickhouse_driver.py b/tests/integrations/clickhouse_driver/test_clickhouse_driver.py index 0675ad9ff5..6d89d85c91 100644 --- a/tests/integrations/clickhouse_driver/test_clickhouse_driver.py +++ b/tests/integrations/clickhouse_driver/test_clickhouse_driver.py @@ -1,14 +1,14 @@ """ Tests need a local clickhouse instance running, this can best be done using ```sh -docker run -d -p 18123:8123 -p9000:9000 --name clickhouse-test --ulimit nofile=262144:262144 --rm clickhouse/clickhouse-server +docker run -d -p 18123:8123 -p9000:9000 --name clickhouse-test --ulimit nofile=262144:262144 --rm clickhouse ``` """ import clickhouse_driver from clickhouse_driver import Client, connect -from sentry_sdk import start_transaction, capture_message +from sentry_sdk import start_span, capture_message from sentry_sdk.integrations.clickhouse_driver import ClickhouseDriverIntegration from tests.conftest import ApproxDict @@ -233,7 +233,7 @@ def test_clickhouse_client_spans( transaction_trace_id = None transaction_span_id = None - with start_transaction(name="test_clickhouse_transaction") as transaction: + with start_span(name="test_clickhouse_transaction") as transaction: 
transaction_trace_id = transaction.trace_id transaction_span_id = transaction.span_id @@ -256,13 +256,15 @@ def test_clickhouse_client_spans( "origin": "auto.db.clickhouse_driver", "description": "DROP TABLE IF EXISTS test", "data": { + "sentry.name": "DROP TABLE IF EXISTS test", + "sentry.origin": "auto.db.clickhouse_driver", + "sentry.op": "db", "db.system": "clickhouse", "db.name": "", "db.user": "default", "server.address": "localhost", "server.port": 9000, }, - "same_process_as_parent": True, "trace_id": transaction_trace_id, "parent_span_id": transaction_span_id, }, @@ -271,13 +273,15 @@ def test_clickhouse_client_spans( "origin": "auto.db.clickhouse_driver", "description": "CREATE TABLE test (x Int32) ENGINE = Memory", "data": { + "sentry.name": "CREATE TABLE test (x Int32) ENGINE = Memory", + "sentry.origin": "auto.db.clickhouse_driver", + "sentry.op": "db", "db.system": "clickhouse", "db.name": "", "db.user": "default", "server.address": "localhost", "server.port": 9000, }, - "same_process_as_parent": True, "trace_id": transaction_trace_id, "parent_span_id": transaction_span_id, }, @@ -286,13 +290,15 @@ def test_clickhouse_client_spans( "origin": "auto.db.clickhouse_driver", "description": "INSERT INTO test (x) VALUES", "data": { + "sentry.name": "INSERT INTO test (x) VALUES", + "sentry.origin": "auto.db.clickhouse_driver", + "sentry.op": "db", "db.system": "clickhouse", "db.name": "", "db.user": "default", "server.address": "localhost", "server.port": 9000, }, - "same_process_as_parent": True, "trace_id": transaction_trace_id, "parent_span_id": transaction_span_id, }, @@ -301,13 +307,15 @@ def test_clickhouse_client_spans( "origin": "auto.db.clickhouse_driver", "description": "INSERT INTO test (x) VALUES", "data": { + "sentry.name": "INSERT INTO test (x) VALUES", + "sentry.origin": "auto.db.clickhouse_driver", + "sentry.op": "db", "db.system": "clickhouse", "db.name": "", "db.user": "default", "server.address": "localhost", "server.port": 9000, }, - "same_process_as_parent": True, "trace_id": transaction_trace_id, "parent_span_id": transaction_span_id, }, @@ -316,13 +324,15 @@ def test_clickhouse_client_spans( "origin": "auto.db.clickhouse_driver", "description": "SELECT sum(x) FROM test WHERE x > 150", "data": { + "sentry.name": "SELECT sum(x) FROM test WHERE x > 150", + "sentry.origin": "auto.db.clickhouse_driver", + "sentry.op": "db", "db.system": "clickhouse", "db.name": "", "db.user": "default", "server.address": "localhost", "server.port": 9000, }, - "same_process_as_parent": True, "trace_id": transaction_trace_id, "parent_span_id": transaction_span_id, }, @@ -338,13 +348,13 @@ def test_clickhouse_client_spans( span.pop("span_id", None) span.pop("start_timestamp", None) span.pop("timestamp", None) + span.pop("same_process_as_parent", None) + span.pop("status", None) assert event["spans"] == expected_spans -def test_clickhouse_client_spans_with_pii( - sentry_init, capture_events, capture_envelopes -) -> None: +def test_clickhouse_client_spans_with_pii(sentry_init, capture_events) -> None: sentry_init( integrations=[ClickhouseDriverIntegration()], _experiments={"record_sql_params": True}, @@ -356,7 +366,7 @@ def test_clickhouse_client_spans_with_pii( transaction_trace_id = None transaction_span_id = None - with start_transaction(name="test_clickhouse_transaction") as transaction: + with start_span(name="test_clickhouse_transaction") as transaction: transaction_trace_id = transaction.trace_id transaction_span_id = transaction.span_id @@ -379,14 +389,17 @@ def 
test_clickhouse_client_spans_with_pii( "origin": "auto.db.clickhouse_driver", "description": "DROP TABLE IF EXISTS test", "data": { + "sentry.name": "DROP TABLE IF EXISTS test", + "sentry.origin": "auto.db.clickhouse_driver", + "sentry.op": "db", "db.system": "clickhouse", "db.name": "", "db.user": "default", "server.address": "localhost", "server.port": 9000, + "db.query.text": "DROP TABLE IF EXISTS test", "db.result": [], }, - "same_process_as_parent": True, "trace_id": transaction_trace_id, "parent_span_id": transaction_span_id, }, @@ -395,14 +408,17 @@ def test_clickhouse_client_spans_with_pii( "origin": "auto.db.clickhouse_driver", "description": "CREATE TABLE test (x Int32) ENGINE = Memory", "data": { + "sentry.name": "CREATE TABLE test (x Int32) ENGINE = Memory", + "sentry.origin": "auto.db.clickhouse_driver", + "sentry.op": "db", "db.system": "clickhouse", "db.name": "", "db.user": "default", + "db.query.text": "CREATE TABLE test (x Int32) ENGINE = Memory", + "db.result": [], "server.address": "localhost", "server.port": 9000, - "db.result": [], }, - "same_process_as_parent": True, "trace_id": transaction_trace_id, "parent_span_id": transaction_span_id, }, @@ -411,14 +427,17 @@ def test_clickhouse_client_spans_with_pii( "origin": "auto.db.clickhouse_driver", "description": "INSERT INTO test (x) VALUES", "data": { + "sentry.name": "INSERT INTO test (x) VALUES", + "sentry.origin": "auto.db.clickhouse_driver", + "sentry.op": "db", "db.system": "clickhouse", "db.name": "", "db.user": "default", + "db.query.text": "INSERT INTO test (x) VALUES", + "db.params": '[{"x": 100}]', "server.address": "localhost", "server.port": 9000, - "db.params": [{"x": 100}], }, - "same_process_as_parent": True, "trace_id": transaction_trace_id, "parent_span_id": transaction_span_id, }, @@ -427,14 +446,16 @@ def test_clickhouse_client_spans_with_pii( "origin": "auto.db.clickhouse_driver", "description": "INSERT INTO test (x) VALUES", "data": { + "sentry.name": "INSERT INTO test (x) VALUES", + "sentry.origin": "auto.db.clickhouse_driver", + "sentry.op": "db", "db.system": "clickhouse", "db.name": "", "db.user": "default", + "db.query.text": "INSERT INTO test (x) VALUES", "server.address": "localhost", "server.port": 9000, - "db.params": [[170], [200]], }, - "same_process_as_parent": True, "trace_id": transaction_trace_id, "parent_span_id": transaction_span_id, }, @@ -443,15 +464,18 @@ def test_clickhouse_client_spans_with_pii( "origin": "auto.db.clickhouse_driver", "description": "SELECT sum(x) FROM test WHERE x > 150", "data": { + "sentry.name": "SELECT sum(x) FROM test WHERE x > 150", + "sentry.origin": "auto.db.clickhouse_driver", + "sentry.op": "db", "db.system": "clickhouse", "db.name": "", "db.user": "default", + "db.params": '{"minv": 150}', + "db.query.text": "SELECT sum(x) FROM test WHERE x > 150", + "db.result": "[[370]]", "server.address": "localhost", "server.port": 9000, - "db.params": {"minv": 150}, - "db.result": [[370]], }, - "same_process_as_parent": True, "trace_id": transaction_trace_id, "parent_span_id": transaction_span_id, }, @@ -467,6 +491,8 @@ def test_clickhouse_client_spans_with_pii( span.pop("span_id", None) span.pop("start_timestamp", None) span.pop("timestamp", None) + span.pop("same_process_as_parent", None) + span.pop("status", None) assert event["spans"] == expected_spans @@ -681,7 +707,7 @@ def test_clickhouse_dbapi_spans(sentry_init, capture_events, capture_envelopes) transaction_trace_id = None transaction_span_id = None - with 
start_transaction(name="test_clickhouse_transaction") as transaction: + with start_span(name="test_clickhouse_transaction") as transaction: transaction_trace_id = transaction.trace_id transaction_span_id = transaction.span_id @@ -704,13 +730,15 @@ def test_clickhouse_dbapi_spans(sentry_init, capture_events, capture_envelopes) "origin": "auto.db.clickhouse_driver", "description": "DROP TABLE IF EXISTS test", "data": { + "sentry.name": "DROP TABLE IF EXISTS test", + "sentry.origin": "auto.db.clickhouse_driver", + "sentry.op": "db", "db.system": "clickhouse", "db.name": "", "db.user": "default", "server.address": "localhost", "server.port": 9000, }, - "same_process_as_parent": True, "trace_id": transaction_trace_id, "parent_span_id": transaction_span_id, }, @@ -719,13 +747,15 @@ def test_clickhouse_dbapi_spans(sentry_init, capture_events, capture_envelopes) "origin": "auto.db.clickhouse_driver", "description": "CREATE TABLE test (x Int32) ENGINE = Memory", "data": { + "sentry.name": "CREATE TABLE test (x Int32) ENGINE = Memory", + "sentry.origin": "auto.db.clickhouse_driver", + "sentry.op": "db", "db.system": "clickhouse", "db.name": "", "db.user": "default", "server.address": "localhost", "server.port": 9000, }, - "same_process_as_parent": True, "trace_id": transaction_trace_id, "parent_span_id": transaction_span_id, }, @@ -734,13 +764,15 @@ def test_clickhouse_dbapi_spans(sentry_init, capture_events, capture_envelopes) "origin": "auto.db.clickhouse_driver", "description": "INSERT INTO test (x) VALUES", "data": { + "sentry.name": "INSERT INTO test (x) VALUES", + "sentry.origin": "auto.db.clickhouse_driver", + "sentry.op": "db", "db.system": "clickhouse", "db.name": "", "db.user": "default", "server.address": "localhost", "server.port": 9000, }, - "same_process_as_parent": True, "trace_id": transaction_trace_id, "parent_span_id": transaction_span_id, }, @@ -749,13 +781,15 @@ def test_clickhouse_dbapi_spans(sentry_init, capture_events, capture_envelopes) "origin": "auto.db.clickhouse_driver", "description": "INSERT INTO test (x) VALUES", "data": { + "sentry.name": "INSERT INTO test (x) VALUES", + "sentry.origin": "auto.db.clickhouse_driver", + "sentry.op": "db", "db.system": "clickhouse", "db.name": "", "db.user": "default", "server.address": "localhost", "server.port": 9000, }, - "same_process_as_parent": True, "trace_id": transaction_trace_id, "parent_span_id": transaction_span_id, }, @@ -764,13 +798,15 @@ def test_clickhouse_dbapi_spans(sentry_init, capture_events, capture_envelopes) "origin": "auto.db.clickhouse_driver", "description": "SELECT sum(x) FROM test WHERE x > 150", "data": { + "sentry.name": "SELECT sum(x) FROM test WHERE x > 150", + "sentry.origin": "auto.db.clickhouse_driver", + "sentry.op": "db", "db.system": "clickhouse", "db.name": "", "db.user": "default", "server.address": "localhost", "server.port": 9000, }, - "same_process_as_parent": True, "trace_id": transaction_trace_id, "parent_span_id": transaction_span_id, }, @@ -786,6 +822,7 @@ def test_clickhouse_dbapi_spans(sentry_init, capture_events, capture_envelopes) span.pop("span_id", None) span.pop("start_timestamp", None) span.pop("timestamp", None) + span.pop("status") assert event["spans"] == expected_spans @@ -804,7 +841,7 @@ def test_clickhouse_dbapi_spans_with_pii( transaction_trace_id = None transaction_span_id = None - with start_transaction(name="test_clickhouse_transaction") as transaction: + with start_span(name="test_clickhouse_transaction") as transaction: transaction_trace_id = transaction.trace_id 
transaction_span_id = transaction.span_id @@ -827,14 +864,17 @@ def test_clickhouse_dbapi_spans_with_pii( "origin": "auto.db.clickhouse_driver", "description": "DROP TABLE IF EXISTS test", "data": { + "sentry.name": "DROP TABLE IF EXISTS test", + "sentry.origin": "auto.db.clickhouse_driver", + "sentry.op": "db", "db.system": "clickhouse", "db.name": "", "db.user": "default", + "db.query.text": "DROP TABLE IF EXISTS test", + "db.result": "[[], []]", "server.address": "localhost", "server.port": 9000, - "db.result": [[], []], }, - "same_process_as_parent": True, "trace_id": transaction_trace_id, "parent_span_id": transaction_span_id, }, @@ -843,14 +883,17 @@ def test_clickhouse_dbapi_spans_with_pii( "origin": "auto.db.clickhouse_driver", "description": "CREATE TABLE test (x Int32) ENGINE = Memory", "data": { + "sentry.name": "CREATE TABLE test (x Int32) ENGINE = Memory", + "sentry.origin": "auto.db.clickhouse_driver", + "sentry.op": "db", "db.system": "clickhouse", "db.name": "", "db.user": "default", + "db.query.text": "CREATE TABLE test (x Int32) ENGINE = Memory", + "db.result": "[[], []]", "server.address": "localhost", "server.port": 9000, - "db.result": [[], []], }, - "same_process_as_parent": True, "trace_id": transaction_trace_id, "parent_span_id": transaction_span_id, }, @@ -859,14 +902,17 @@ def test_clickhouse_dbapi_spans_with_pii( "origin": "auto.db.clickhouse_driver", "description": "INSERT INTO test (x) VALUES", "data": { + "sentry.name": "INSERT INTO test (x) VALUES", + "sentry.origin": "auto.db.clickhouse_driver", + "sentry.op": "db", "db.system": "clickhouse", "db.name": "", "db.user": "default", + "db.query.text": "INSERT INTO test (x) VALUES", + "db.params": '[{"x": 100}]', "server.address": "localhost", "server.port": 9000, - "db.params": [{"x": 100}], }, - "same_process_as_parent": True, "trace_id": transaction_trace_id, "parent_span_id": transaction_span_id, }, @@ -875,14 +921,17 @@ def test_clickhouse_dbapi_spans_with_pii( "origin": "auto.db.clickhouse_driver", "description": "INSERT INTO test (x) VALUES", "data": { + "sentry.name": "INSERT INTO test (x) VALUES", + "sentry.origin": "auto.db.clickhouse_driver", + "sentry.op": "db", "db.system": "clickhouse", "db.name": "", "db.user": "default", + "db.query.text": "INSERT INTO test (x) VALUES", + "db.params": "[[170], [200]]", "server.address": "localhost", "server.port": 9000, - "db.params": [[170], [200]], }, - "same_process_as_parent": True, "trace_id": transaction_trace_id, "parent_span_id": transaction_span_id, }, @@ -891,15 +940,18 @@ def test_clickhouse_dbapi_spans_with_pii( "origin": "auto.db.clickhouse_driver", "description": "SELECT sum(x) FROM test WHERE x > 150", "data": { + "sentry.name": "SELECT sum(x) FROM test WHERE x > 150", + "sentry.origin": "auto.db.clickhouse_driver", + "sentry.op": "db", "db.system": "clickhouse", "db.name": "", "db.user": "default", + "db.query.text": "SELECT sum(x) FROM test WHERE x > 150", + "db.params": '{"minv": 150}', + "db.result": '[[[370]], [["sum(x)", "Int64"]]]', "server.address": "localhost", "server.port": 9000, - "db.params": {"minv": 150}, - "db.result": [[[370]], [["sum(x)", "Int64"]]], }, - "same_process_as_parent": True, "trace_id": transaction_trace_id, "parent_span_id": transaction_span_id, }, @@ -915,6 +967,8 @@ def test_clickhouse_dbapi_spans_with_pii( span.pop("span_id", None) span.pop("start_timestamp", None) span.pop("timestamp", None) + span.pop("same_process_as_parent", None) + span.pop("status", None) assert event["spans"] == expected_spans @@ -927,7 
+981,7 @@ def test_span_origin(sentry_init, capture_events, capture_envelopes) -> None: events = capture_events() - with start_transaction(name="test_clickhouse_transaction"): + with start_span(name="test_clickhouse_transaction"): conn = connect("clickhouse://localhost") cursor = conn.cursor() cursor.execute("SELECT 1") diff --git a/tests/integrations/cohere/test_cohere.py b/tests/integrations/cohere/test_cohere.py index c0dff2214e..25d1c30cf4 100644 --- a/tests/integrations/cohere/test_cohere.py +++ b/tests/integrations/cohere/test_cohere.py @@ -4,7 +4,7 @@ import pytest from cohere import Client, ChatMessage -from sentry_sdk import start_transaction +from sentry_sdk import start_span from sentry_sdk.integrations.cohere import CohereIntegration from unittest import mock # python 3.3 and above @@ -41,7 +41,7 @@ def test_nonstreaming_chat( ) ) - with start_transaction(name="cohere tx"): + with start_span(name="cohere tx"): response = client.chat( model="some-model", chat_history=[ChatMessage(role="SYSTEM", message="some context")], @@ -56,16 +56,17 @@ def test_nonstreaming_chat( assert span["data"]["ai.model_id"] == "some-model" if send_default_pii and include_prompts: - assert "some context" in span["data"]["ai.input_messages"][0]["content"] - assert "hello" in span["data"]["ai.input_messages"][1]["content"] + input_messages = json.loads(span["data"]["ai.input_messages"]) + assert "some context" in input_messages[0]["content"] + assert "hello" in input_messages[1]["content"] assert "the model response" in span["data"]["ai.responses"] else: assert "ai.input_messages" not in span["data"] assert "ai.responses" not in span["data"] - assert span["measurements"]["ai_completion_tokens_used"]["value"] == 10 - assert span["measurements"]["ai_prompt_tokens_used"]["value"] == 20 - assert span["measurements"]["ai_total_tokens_used"]["value"] == 30 + assert span["data"]["ai.completion_tokens.used"] == 10 + assert span["data"]["ai.prompt_tokens.used"] == 20 + assert span["data"]["ai.total_tokens.used"] == 30 # noinspection PyTypeChecker @@ -109,7 +110,7 @@ def test_streaming_chat(sentry_init, capture_events, send_default_pii, include_p ) ) - with start_transaction(name="cohere tx"): + with start_span(name="cohere tx"): responses = list( client.chat_stream( model="some-model", @@ -127,16 +128,17 @@ def test_streaming_chat(sentry_init, capture_events, send_default_pii, include_p assert span["data"]["ai.model_id"] == "some-model" if send_default_pii and include_prompts: - assert "some context" in span["data"]["ai.input_messages"][0]["content"] - assert "hello" in span["data"]["ai.input_messages"][1]["content"] + input_messages = json.loads(span["data"]["ai.input_messages"]) + assert "some context" in input_messages[0]["content"] + assert "hello" in input_messages[1]["content"] assert "the model response" in span["data"]["ai.responses"] else: assert "ai.input_messages" not in span["data"] assert "ai.responses" not in span["data"] - assert span["measurements"]["ai_completion_tokens_used"]["value"] == 10 - assert span["measurements"]["ai_prompt_tokens_used"]["value"] == 20 - assert span["measurements"]["ai_total_tokens_used"]["value"] == 30 + assert span["data"]["ai.completion_tokens.used"] == 10 + assert span["data"]["ai.prompt_tokens.used"] == 20 + assert span["data"]["ai.total_tokens.used"] == 30 def test_bad_chat(sentry_init, capture_events): @@ -184,7 +186,7 @@ def test_embed(sentry_init, capture_events, send_default_pii, include_prompts): ) ) - with start_transaction(name="cohere tx"): + with 
start_span(name="cohere tx"): response = client.embed(texts=["hello"], model="text-embedding-3-large") assert len(response.embeddings[0]) == 3 @@ -198,8 +200,8 @@ def test_embed(sentry_init, capture_events, send_default_pii, include_prompts): else: assert "ai.input_messages" not in span["data"] - assert span["measurements"]["ai_prompt_tokens_used"]["value"] == 10 - assert span["measurements"]["ai_total_tokens_used"]["value"] == 10 + assert span["data"]["ai.prompt_tokens.used"] == 10 + assert span["data"]["ai.total_tokens.used"] == 10 def test_span_origin_chat(sentry_init, capture_events): @@ -225,7 +227,7 @@ def test_span_origin_chat(sentry_init, capture_events): ) ) - with start_transaction(name="cohere tx"): + with start_span(name="cohere tx"): client.chat( model="some-model", chat_history=[ChatMessage(role="SYSTEM", message="some context")], @@ -263,7 +265,7 @@ def test_span_origin_embed(sentry_init, capture_events): ) ) - with start_transaction(name="cohere tx"): + with start_span(name="cohere tx"): client.embed(texts=["hello"], model="text-embedding-3-large") (event,) = events diff --git a/tests/integrations/conftest.py b/tests/integrations/conftest.py index 7ac43b0efe..ab0c096a55 100644 --- a/tests/integrations/conftest.py +++ b/tests/integrations/conftest.py @@ -6,19 +6,8 @@ def capture_exceptions(monkeypatch): def inner(): errors = set() - old_capture_event_hub = sentry_sdk.Hub.capture_event old_capture_event_scope = sentry_sdk.Scope.capture_event - def capture_event_hub(self, event, hint=None, scope=None): - """ - Can be removed when we remove push_scope and the Hub from the SDK. - """ - if hint: - if "exc_info" in hint: - error = hint["exc_info"][1] - errors.add(error) - return old_capture_event_hub(self, event, hint=hint, scope=scope) - def capture_event_scope(self, event, hint=None, scope=None): if hint: if "exc_info" in hint: @@ -26,7 +15,6 @@ def capture_event_scope(self, event, hint=None, scope=None): errors.add(error) return old_capture_event_scope(self, event, hint=hint, scope=scope) - monkeypatch.setattr(sentry_sdk.Hub, "capture_event", capture_event_hub) monkeypatch.setattr(sentry_sdk.Scope, "capture_event", capture_event_scope) return errors diff --git a/tests/integrations/django/asgi/test_asgi.py b/tests/integrations/django/asgi/test_asgi.py index 82eae30b1d..c10a6b7b8e 100644 --- a/tests/integrations/django/asgi/test_asgi.py +++ b/tests/integrations/django/asgi/test_asgi.py @@ -335,9 +335,7 @@ async def test_has_trace_if_performance_enabled(sentry_init, capture_events): django.VERSION < (3, 1), reason="async views have been introduced in Django 3.1" ) async def test_has_trace_if_performance_disabled(sentry_init, capture_events): - sentry_init( - integrations=[DjangoIntegration()], - ) + sentry_init(integrations=[DjangoIntegration()]) events = capture_events() @@ -400,9 +398,7 @@ async def test_trace_from_headers_if_performance_enabled(sentry_init, capture_ev django.VERSION < (3, 1), reason="async views have been introduced in Django 3.1" ) async def test_trace_from_headers_if_performance_disabled(sentry_init, capture_events): - sentry_init( - integrations=[DjangoIntegration()], - ) + sentry_init(integrations=[DjangoIntegration()]) events = capture_events() @@ -662,7 +658,12 @@ async def test_transaction_http_method_default( By default OPTIONS and HEAD requests do not create a transaction. 
""" sentry_init( - integrations=[DjangoIntegration()], + integrations=[ + DjangoIntegration( + middleware_spans=False, + signals_spans=False, + ), + ], traces_sample_rate=1.0, ) events = capture_events() @@ -691,6 +692,8 @@ async def test_transaction_http_method_custom(sentry_init, capture_events, appli sentry_init( integrations=[ DjangoIntegration( + middleware_spans=False, + signals_spans=False, http_methods_to_capture=( "OPTIONS", "head", diff --git a/tests/integrations/django/test_basic.py b/tests/integrations/django/test_basic.py index 0e3f700105..5b75bbb6af 100644 --- a/tests/integrations/django/test_basic.py +++ b/tests/integrations/django/test_basic.py @@ -10,7 +10,6 @@ from werkzeug.test import Client from django import VERSION as DJANGO_VERSION -from django.contrib.auth.models import User from django.core.management import execute_from_command_line from django.db.utils import OperationalError, ProgrammingError, DataError from django.http.request import RawPostDataException @@ -292,6 +291,9 @@ def test_user_captured(sentry_init, client, capture_events): def test_queryset_repr(sentry_init, capture_events): sentry_init(integrations=[DjangoIntegration()]) events = capture_events() + + from django.contrib.auth.models import User + User.objects.create_user("john", "lennon@thebeatles.com", "johnpassword") try: @@ -932,6 +934,11 @@ def test_render_spans(sentry_init, client, capture_events, render_span_tree): transaction = events[0] assert expected_line in render_span_tree(transaction) + render_span = next( + span for span in transaction["spans"] if span["op"] == "template.render" + ) + assert "context.user_age" in render_span["data"] + if DJANGO_VERSION >= (1, 10): EXPECTED_MIDDLEWARE_SPANS = """\ @@ -1117,6 +1124,9 @@ def test_csrf(sentry_init, client): assert content == b"ok" +# This test is forked because it doesn't clean up after itself properly and makes +# other tests fail to resolve routes +@pytest.mark.forked @pytest.mark.skipif(DJANGO_VERSION < (2, 0), reason="Requires Django > 2.0") def test_custom_urlconf_middleware( settings, sentry_init, client, capture_events, render_span_tree @@ -1206,14 +1216,19 @@ def test_transaction_http_method_default(sentry_init, client, capture_events): By default OPTIONS and HEAD requests do not create a transaction. 
""" sentry_init( - integrations=[DjangoIntegration()], + integrations=[ + DjangoIntegration( + middleware_spans=False, + signals_spans=False, + ) + ], traces_sample_rate=1.0, ) events = capture_events() - client.get("/nomessage") - client.options("/nomessage") - client.head("/nomessage") + client.get(reverse("nomessage")) + client.options(reverse("nomessage")) + client.head(reverse("nomessage")) (event,) = events @@ -1229,6 +1244,8 @@ def test_transaction_http_method_custom(sentry_init, client, capture_events): "OPTIONS", "head", ), # capitalization does not matter + middleware_spans=False, + signals_spans=False, ) ], traces_sample_rate=1.0, diff --git a/tests/integrations/django/test_cache_module.py b/tests/integrations/django/test_cache_module.py index 263f9f36f8..2d8cc3d5d6 100644 --- a/tests/integrations/django/test_cache_module.py +++ b/tests/integrations/django/test_cache_module.py @@ -511,7 +511,9 @@ def test_cache_spans_item_size(sentry_init, client, capture_events, use_django_c @pytest.mark.forked @pytest_mark_django_db_decorator() -def test_cache_spans_get_many(sentry_init, capture_events, use_django_caching): +def test_cache_spans_get_many( + sentry_init, capture_events, use_django_caching, render_span_tree +): sentry_init( integrations=[ DjangoIntegration( @@ -528,7 +530,7 @@ def test_cache_spans_get_many(sentry_init, capture_events, use_django_caching): from django.core.cache import cache - with sentry_sdk.start_transaction(): + with sentry_sdk.start_span(name="caches"): cache.get_many([f"S{id}", f"S{id+1}"]) cache.set(f"S{id}", "Sensitive1") cache.get_many([f"S{id}", f"S{id+1}"]) @@ -536,31 +538,26 @@ def test_cache_spans_get_many(sentry_init, capture_events, use_django_caching): (transaction,) = events assert len(transaction["spans"]) == 7 - assert transaction["spans"][0]["op"] == "cache.get" - assert transaction["spans"][0]["description"] == f"S{id}, S{id+1}" - - assert transaction["spans"][1]["op"] == "cache.get" - assert transaction["spans"][1]["description"] == f"S{id}" - - assert transaction["spans"][2]["op"] == "cache.get" - assert transaction["spans"][2]["description"] == f"S{id+1}" - - assert transaction["spans"][3]["op"] == "cache.put" - assert transaction["spans"][3]["description"] == f"S{id}" - - assert transaction["spans"][4]["op"] == "cache.get" - assert transaction["spans"][4]["description"] == f"S{id}, S{id+1}" - - assert transaction["spans"][5]["op"] == "cache.get" - assert transaction["spans"][5]["description"] == f"S{id}" - - assert transaction["spans"][6]["op"] == "cache.get" - assert transaction["spans"][6]["description"] == f"S{id+1}" + assert ( + render_span_tree(transaction) + == f"""\ +- op="caches": description=null + - op="cache.get": description="S{id}, S{id+1}" + - op="cache.get": description="S{id}" + - op="cache.get": description="S{id+1}" + - op="cache.put": description="S{id}" + - op="cache.get": description="S{id}, S{id+1}" + - op="cache.get": description="S{id}" + - op="cache.get": description="S{id+1}"\ +""" # noqa: E221 + ) @pytest.mark.forked @pytest_mark_django_db_decorator() -def test_cache_spans_set_many(sentry_init, capture_events, use_django_caching): +def test_cache_spans_set_many( + sentry_init, capture_events, use_django_caching, render_span_tree +): sentry_init( integrations=[ DjangoIntegration( @@ -577,24 +574,23 @@ def test_cache_spans_set_many(sentry_init, capture_events, use_django_caching): from django.core.cache import cache - with sentry_sdk.start_transaction(): + with sentry_sdk.start_span(name="caches"): 
cache.set_many({f"S{id}": "Sensitive1", f"S{id+1}": "Sensitive2"}) cache.get(f"S{id}") (transaction,) = events assert len(transaction["spans"]) == 4 - assert transaction["spans"][0]["op"] == "cache.put" - assert transaction["spans"][0]["description"] == f"S{id}, S{id+1}" - - assert transaction["spans"][1]["op"] == "cache.put" - assert transaction["spans"][1]["description"] == f"S{id}" - - assert transaction["spans"][2]["op"] == "cache.put" - assert transaction["spans"][2]["description"] == f"S{id+1}" - - assert transaction["spans"][3]["op"] == "cache.get" - assert transaction["spans"][3]["description"] == f"S{id}" + assert ( + render_span_tree(transaction) + == f"""\ +- op="caches": description=null + - op="cache.put": description="S{id}, S{id+1}" + - op="cache.put": description="S{id}" + - op="cache.put": description="S{id+1}" + - op="cache.get": description="S{id}"\ +""" # noqa: E221 + ) @pytest.mark.forked diff --git a/tests/integrations/django/test_db_query_data.py b/tests/integrations/django/test_db_query_data.py index 41ad9d5e1c..82f1f339a6 100644 --- a/tests/integrations/django/test_db_query_data.py +++ b/tests/integrations/django/test_db_query_data.py @@ -1,6 +1,7 @@ import os import pytest +from contextlib import contextmanager from datetime import datetime from unittest import mock @@ -12,9 +13,10 @@ except ImportError: from django.core.urlresolvers import reverse +from freezegun import freeze_time from werkzeug.test import Client -from sentry_sdk import start_transaction +from sentry_sdk import start_span from sentry_sdk.consts import SPANDATA from sentry_sdk.integrations.django import DjangoIntegration from sentry_sdk.tracing_utils import record_sql_queries @@ -346,27 +348,24 @@ def test_no_query_source_if_duration_too_short(sentry_init, client, capture_even events = capture_events() - class fake_record_sql_queries: # noqa: N801 - def __init__(self, *args, **kwargs): - with record_sql_queries(*args, **kwargs) as span: - self.span = span - - self.span.start_timestamp = datetime(2024, 1, 1, microsecond=0) - self.span.timestamp = datetime(2024, 1, 1, microsecond=99999) - - def __enter__(self): - return self.span - - def __exit__(self, type, value, traceback): - pass + def fake_start_span(*args, **kwargs): # noqa: N801 + with freeze_time(datetime(2024, 1, 1, microsecond=0)): + return start_span(*args, **kwargs) - with mock.patch( - "sentry_sdk.integrations.django.record_sql_queries", - fake_record_sql_queries, - ): - _, status, _ = unpack_werkzeug_response( - client.get(reverse("postgres_select_orm")) - ) + @contextmanager + def fake_record_sql_queries(*args, **kwargs): # noqa: N801 + with freeze_time(datetime(2024, 1, 1, microsecond=99999)): + with record_sql_queries(*args, **kwargs) as span: + yield span + + with mock.patch("sentry_sdk.start_span", fake_start_span): + with mock.patch( + "sentry_sdk.integrations.django.record_sql_queries", + fake_record_sql_queries, + ): + _, status, _ = unpack_werkzeug_response( + client.get(reverse("postgres_select_orm")) + ) assert status == "200 OK" @@ -404,27 +403,24 @@ def test_query_source_if_duration_over_threshold(sentry_init, client, capture_ev events = capture_events() - class fake_record_sql_queries: # noqa: N801 - def __init__(self, *args, **kwargs): - with record_sql_queries(*args, **kwargs) as span: - self.span = span - - self.span.start_timestamp = datetime(2024, 1, 1, microsecond=0) - self.span.timestamp = datetime(2024, 1, 1, microsecond=101000) - - def __enter__(self): - return self.span - - def __exit__(self, type, value, 
traceback): - pass + def fake_start_span(*args, **kwargs): # noqa: N801 + with freeze_time(datetime(2024, 1, 1, microsecond=0)): + return start_span(*args, **kwargs) - with mock.patch( - "sentry_sdk.integrations.django.record_sql_queries", - fake_record_sql_queries, - ): - _, status, _ = unpack_werkzeug_response( - client.get(reverse("postgres_select_orm")) - ) + @contextmanager + def fake_record_sql_queries(*args, **kwargs): # noqa: N801 + with freeze_time(datetime(2024, 1, 1, microsecond=100001)): + with record_sql_queries(*args, **kwargs) as span: + yield span + + with mock.patch("sentry_sdk.start_span", fake_start_span): + with mock.patch( + "sentry_sdk.integrations.django.record_sql_queries", + fake_record_sql_queries, + ): + _, status, _ = unpack_werkzeug_response( + client.get(reverse("postgres_select_orm")) + ) assert status == "200 OK" @@ -500,7 +496,7 @@ def test_db_span_origin_executemany(sentry_init, client, capture_events): if "postgres" not in connections: pytest.skip("postgres tests disabled") - with start_transaction(name="test_transaction"): + with start_span(name="test_transaction"): from django.db import connection, transaction cursor = connection.cursor() diff --git a/tests/integrations/django/test_transactions.py b/tests/integrations/django/test_transactions.py index 14f8170fc3..0eaf99dc23 100644 --- a/tests/integrations/django/test_transactions.py +++ b/tests/integrations/django/test_transactions.py @@ -21,6 +21,7 @@ included_url_conf = ((re_path(r"^foo/bar/(?P[\w]+)", lambda x: ""),), "") from sentry_sdk.integrations.django.transactions import RavenResolver +from tests.integrations.django.myapp.wsgi import application # noqa: F401 example_url_conf = ( diff --git a/tests/integrations/fastapi/test_fastapi.py b/tests/integrations/fastapi/test_fastapi.py index 95838b1009..1c40abedcb 100644 --- a/tests/integrations/fastapi/test_fastapi.py +++ b/tests/integrations/fastapi/test_fastapi.py @@ -20,7 +20,6 @@ FASTAPI_VERSION = parse_version(fastapi.__version__) from tests.integrations.conftest import parametrize_test_configurable_status_codes -from tests.integrations.starlette import test_starlette def fastapi_app_factory(): @@ -528,48 +527,6 @@ def test_transaction_name_in_middleware( ) -@test_starlette.parametrize_test_configurable_status_codes_deprecated -def test_configurable_status_codes_deprecated( - sentry_init, - capture_events, - failed_request_status_codes, - status_code, - expected_error, -): - with pytest.warns(DeprecationWarning): - starlette_integration = StarletteIntegration( - failed_request_status_codes=failed_request_status_codes - ) - - with pytest.warns(DeprecationWarning): - fast_api_integration = FastApiIntegration( - failed_request_status_codes=failed_request_status_codes - ) - - sentry_init( - integrations=[ - starlette_integration, - fast_api_integration, - ] - ) - - events = capture_events() - - app = FastAPI() - - @app.get("/error") - async def _error(): - raise HTTPException(status_code) - - client = TestClient(app) - client.get("/error") - - if expected_error: - assert len(events) == 1 - else: - assert not events - - @pytest.mark.skipif( FASTAPI_VERSION < (0, 80), reason="Requires FastAPI >= 0.80, because earlier versions do not support HTTP 'HEAD' requests", diff --git a/tests/integrations/flask/test_flask.py b/tests/integrations/flask/test_flask.py index 6febb12b8b..a95393c585 100644 --- a/tests/integrations/flask/test_flask.py +++ b/tests/integrations/flask/test_flask.py @@ -285,7 +285,7 @@ def index(): try: raise ValueError("stuff") except 
Exception: - logging.exception("stuff happened") + sentry_sdk.capture_exception() 1 / 0 envelopes = capture_envelopes() @@ -751,12 +751,14 @@ def hi_tx(): assert transaction_event["type"] == "transaction" assert transaction_event["transaction"] == "hi_tx" + assert transaction_event["transaction_info"] == {"source": "component"} assert transaction_event["contexts"]["trace"]["status"] == "ok" assert transaction_event["tags"]["view"] == "yes" assert transaction_event["tags"]["before_request"] == "yes" assert message_event["message"] == "hi" assert message_event["transaction"] == "hi_tx" + assert message_event["transaction_info"] == {"source": "component"} assert message_event["tags"]["view"] == "yes" assert message_event["tags"]["before_request"] == "yes" @@ -873,7 +875,12 @@ def index(): def test_request_not_modified_by_reference(sentry_init, capture_events, app): - sentry_init(integrations=[flask_sentry.FlaskIntegration()]) + sentry_init( + integrations=[ + flask_sentry.FlaskIntegration(), + LoggingIntegration(event_level="ERROR"), + ] + ) @app.route("/", methods=["POST"]) def index(): diff --git a/tests/integrations/gcp/test_gcp.py b/tests/integrations/gcp/test_gcp.py index 22d104c817..3ea97cf0e6 100644 --- a/tests/integrations/gcp/test_gcp.py +++ b/tests/integrations/gcp/test_gcp.py @@ -293,35 +293,32 @@ def test_traces_sampler_gets_correct_values_in_sampling_context( dedent( """ functionhandler = None - event = { - "type": "chase", - "chasers": ["Maisey", "Charlie"], - "num_squirrels": 2, - } + + from collections import namedtuple + GCPEvent = namedtuple("GCPEvent", ["headers"]) + event = GCPEvent(headers={"Custom-Header": "Custom Value"}) + def cloud_function(functionhandler, event): # this runs after the transaction has started, which means we # can make assertions about traces_sampler try: traces_sampler.assert_any_call( DictionaryContaining({ - "gcp_env": DictionaryContaining({ - "function_name": "chase_into_tree", - "function_region": "dogpark", - "function_project": "SquirrelChasing", - }), - "gcp_event": { - "type": "chase", - "chasers": ["Maisey", "Charlie"], - "num_squirrels": 2, - }, + "faas.name": "chase_into_tree", + "faas.region": "dogpark", + "gcp.function.identity": "func_ID", + "gcp.function.entry_point": "cloud_function", + "gcp.function.project": "SquirrelChasing", + "cloud.provider": "gcp", + "http.request.header.custom-header": "Custom Value", }) ) except AssertionError: # catch the error and return it because the error itself will # get swallowed by the SDK as an "internal exception" - return {"AssertionError raised": True,} + return {"AssertionError raised": True} - return {"AssertionError raised": False,} + return {"AssertionError raised": False} """ ) + FUNCTIONS_PRELUDE diff --git a/tests/integrations/graphene/test_graphene.py b/tests/integrations/graphene/test_graphene.py index 5d54bb49cb..63bc5de5d2 100644 --- a/tests/integrations/graphene/test_graphene.py +++ b/tests/integrations/graphene/test_graphene.py @@ -207,7 +207,7 @@ def graphql_server_sync(): def test_graphql_span_holds_query_information(sentry_init, capture_events): sentry_init( integrations=[GrapheneIntegration(), FlaskIntegration()], - enable_tracing=True, + traces_sample_rate=1.0, default_integrations=False, ) events = capture_events() diff --git a/tests/integrations/grpc/test_grpc.py b/tests/integrations/grpc/test_grpc.py index 8d2698f411..7d39e6b63f 100644 --- a/tests/integrations/grpc/test_grpc.py +++ b/tests/integrations/grpc/test_grpc.py @@ -5,7 +5,7 @@ from typing import List, Optional, 
Tuple from unittest.mock import Mock -from sentry_sdk import start_span, start_transaction +from sentry_sdk import start_span from sentry_sdk.consts import OP from sentry_sdk.integrations.grpc import GRPCIntegration from tests.conftest import ApproxDict @@ -50,7 +50,7 @@ def _tear_down(server: grpc.Server): @pytest.mark.forked -def test_grpc_server_starts_transaction(sentry_init, capture_events_forksafe): +def test_grpc_server_starts_root_span(sentry_init, capture_events_forksafe): sentry_init(traces_sample_rate=1.0, integrations=[GRPCIntegration()]) events = capture_events_forksafe() @@ -108,7 +108,7 @@ def test_grpc_server_other_interceptors(sentry_init, capture_events_forksafe): @pytest.mark.forked -def test_grpc_server_continues_transaction(sentry_init, capture_events_forksafe): +def test_grpc_server_continues_trace(sentry_init, capture_events_forksafe): sentry_init(traces_sample_rate=1.0, integrations=[GRPCIntegration()]) events = capture_events_forksafe() @@ -117,20 +117,20 @@ def test_grpc_server_continues_transaction(sentry_init, capture_events_forksafe) # Use the provided channel stub = gRPCTestServiceStub(channel) - with start_transaction() as transaction: + with start_span() as root_span: metadata = ( ( "baggage", "sentry-trace_id={trace_id},sentry-environment=test," "sentry-transaction=test-transaction,sentry-sample_rate=1.0".format( - trace_id=transaction.trace_id + trace_id=root_span.trace_id ), ), ( "sentry-trace", "{trace_id}-{parent_span_id}-{sampled}".format( - trace_id=transaction.trace_id, - parent_span_id=transaction.span_id, + trace_id=root_span.trace_id, + parent_span_id=root_span.span_id, sampled=1, ), ), @@ -148,7 +148,7 @@ def test_grpc_server_continues_transaction(sentry_init, capture_events_forksafe) "source": "custom", } assert event["contexts"]["trace"]["op"] == OP.GRPC_SERVER - assert event["contexts"]["trace"]["trace_id"] == transaction.trace_id + assert event["contexts"]["trace"]["trace_id"] == root_span.trace_id assert span["op"] == "test" @@ -162,17 +162,17 @@ def test_grpc_client_starts_span(sentry_init, capture_events_forksafe): # Use the provided channel stub = gRPCTestServiceStub(channel) - with start_transaction(): + with start_span(): stub.TestServe(gRPCTestMessage(text="test")) _tear_down(server=server) events.write_file.close() events.read_event() - local_transaction = events.read_event() - span = local_transaction["spans"][0] + local_root_span = events.read_event() + span = local_root_span["spans"][0] - assert len(local_transaction["spans"]) == 1 + assert len(local_root_span["spans"]) == 1 assert span["op"] == OP.GRPC_CLIENT assert ( span["description"] @@ -197,16 +197,16 @@ def test_grpc_client_unary_stream_starts_span(sentry_init, capture_events_forksa # Use the provided channel stub = gRPCTestServiceStub(channel) - with start_transaction(): + with start_span(): [el for el in stub.TestUnaryStream(gRPCTestMessage(text="test"))] _tear_down(server=server) events.write_file.close() - local_transaction = events.read_event() - span = local_transaction["spans"][0] + local_root_span = events.read_event() + span = local_root_span["spans"][0] - assert len(local_transaction["spans"]) == 1 + assert len(local_root_span["spans"]) == 1 assert span["op"] == OP.GRPC_CLIENT assert ( span["description"] @@ -242,7 +242,7 @@ def test_grpc_client_other_interceptor(sentry_init, capture_events_forksafe): channel = grpc.intercept_channel(channel, MockClientInterceptor()) stub = gRPCTestServiceStub(channel) - with start_transaction(): + with start_span(): 
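
# The metadata built in test_grpc_server_continues_trace above is the standard
# Sentry propagation pair. A hedged sketch of that wire format with hypothetical
# IDs; the real test reads trace_id and span_id off the live root span instead:
trace_id = "771a43a4192642f0b136d5159a501700"  # 32 hex chars
parent_span_id = "1234567890abcdef"  # 16 hex chars
sentry_trace_header = f"{trace_id}-{parent_span_id}-1"  # <trace_id>-<span_id>-<sampled>
baggage_header = (
    f"sentry-trace_id={trace_id},sentry-environment=test,"  # noqa: E231
    "sentry-transaction=test-transaction,sentry-sample_rate=1.0"
)
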
stub.TestServe(gRPCTestMessage(text="test")) _tear_down(server=server) @@ -251,10 +251,10 @@ def test_grpc_client_other_interceptor(sentry_init, capture_events_forksafe): events.write_file.close() events.read_event() - local_transaction = events.read_event() - span = local_transaction["spans"][0] + local_root_span = events.read_event() + span = local_root_span["spans"][0] - assert len(local_transaction["spans"]) == 1 + assert len(local_root_span["spans"]) == 1 assert span["op"] == OP.GRPC_CLIENT assert ( span["description"] @@ -281,18 +281,18 @@ def test_grpc_client_and_servers_interceptors_integration( # Use the provided channel stub = gRPCTestServiceStub(channel) - with start_transaction(): + with start_span(): stub.TestServe(gRPCTestMessage(text="test")) _tear_down(server=server) events.write_file.close() - server_transaction = events.read_event() - local_transaction = events.read_event() + server_root_span = events.read_event() + local_root_span = events.read_event() assert ( - server_transaction["contexts"]["trace"]["trace_id"] - == local_transaction["contexts"]["trace"]["trace_id"] + server_root_span["contexts"]["trace"]["trace_id"] + == local_root_span["contexts"]["trace"]["trace_id"] ) @@ -337,26 +337,23 @@ def test_span_origin(sentry_init, capture_events_forksafe): # Use the provided channel stub = gRPCTestServiceStub(channel) - with start_transaction(name="custom_transaction"): + with start_span(name="custom_transaction"): stub.TestServe(gRPCTestMessage(text="test")) _tear_down(server=server) events.write_file.close() - transaction_from_integration = events.read_event() - custom_transaction = events.read_event() + root_span_from_integration = events.read_event() + custom_root_span = events.read_event() + assert root_span_from_integration["contexts"]["trace"]["origin"] == "auto.grpc.grpc" assert ( - transaction_from_integration["contexts"]["trace"]["origin"] == "auto.grpc.grpc" - ) - assert ( - transaction_from_integration["spans"][0]["origin"] - == "auto.grpc.grpc.TestService" + root_span_from_integration["spans"][0]["origin"] == "auto.grpc.grpc.TestService" ) # manually created in TestService, not the instrumentation - assert custom_transaction["contexts"]["trace"]["origin"] == "manual" - assert custom_transaction["spans"][0]["origin"] == "auto.grpc.grpc" + assert custom_root_span["contexts"]["trace"]["origin"] == "manual" + assert custom_root_span["spans"][0]["origin"] == "auto.grpc.grpc" class TestService(gRPCTestServiceServicer): diff --git a/tests/integrations/grpc/test_grpc_aio.py b/tests/integrations/grpc/test_grpc_aio.py index 96e9a4dba8..4f28f25345 100644 --- a/tests/integrations/grpc/test_grpc_aio.py +++ b/tests/integrations/grpc/test_grpc_aio.py @@ -5,7 +5,7 @@ import pytest_asyncio import sentry_sdk -from sentry_sdk import start_span, start_transaction +from sentry_sdk import start_span from sentry_sdk.consts import OP from sentry_sdk.integrations.grpc import GRPCIntegration from tests.conftest import ApproxDict @@ -103,20 +103,20 @@ async def test_grpc_server_continues_transaction( # Use the provided channel stub = gRPCTestServiceStub(channel) - with sentry_sdk.start_transaction() as transaction: + with sentry_sdk.start_span() as root_span: metadata = ( ( "baggage", "sentry-trace_id={trace_id},sentry-environment=test," "sentry-transaction=test-transaction,sentry-sample_rate=1.0".format( - trace_id=transaction.trace_id + trace_id=root_span.trace_id ), ), ( "sentry-trace", "{trace_id}-{parent_span_id}-{sampled}".format( - trace_id=transaction.trace_id, - 
parent_span_id=transaction.span_id, + trace_id=root_span.trace_id, + parent_span_id=root_span.span_id, sampled=1, ), ), @@ -132,7 +132,7 @@ async def test_grpc_server_continues_transaction( "source": "custom", } assert event["contexts"]["trace"]["op"] == OP.GRPC_SERVER - assert event["contexts"]["trace"]["trace_id"] == transaction.trace_id + assert event["contexts"]["trace"]["trace_id"] == root_span.trace_id assert span["op"] == "test" @@ -185,15 +185,15 @@ async def test_grpc_client_starts_span( # Use the provided channel stub = gRPCTestServiceStub(channel) - with start_transaction(): + with start_span(): await stub.TestServe(gRPCTestMessage(text="test")) events.write_file.close() events.read_event() - local_transaction = events.read_event() - span = local_transaction["spans"][0] + local_root_span = events.read_event() + span = local_root_span["spans"][0] - assert len(local_transaction["spans"]) == 1 + assert len(local_root_span["spans"]) == 1 assert span["op"] == OP.GRPC_CLIENT assert ( span["description"] @@ -217,15 +217,15 @@ async def test_grpc_client_unary_stream_starts_span( # Use the provided channel stub = gRPCTestServiceStub(channel) - with start_transaction(): + with start_span(): response = stub.TestUnaryStream(gRPCTestMessage(text="test")) [_ async for _ in response] events.write_file.close() - local_transaction = events.read_event() - span = local_transaction["spans"][0] + local_root_span = events.read_event() + span = local_root_span["spans"][0] - assert len(local_transaction["spans"]) == 1 + assert len(local_root_span["spans"]) == 1 assert span["op"] == OP.GRPC_CLIENT assert ( span["description"] @@ -275,24 +275,22 @@ async def test_span_origin(grpc_server_and_channel, capture_events_forksafe): # Use the provided channel stub = gRPCTestServiceStub(channel) - with start_transaction(name="custom_transaction"): + with start_span(name="custom_root_span"): await stub.TestServe(gRPCTestMessage(text="test")) events.write_file.close() - transaction_from_integration = events.read_event() - custom_transaction = events.read_event() + root_span_from_integration = events.read_event() + custom_root_span = events.read_event() + assert root_span_from_integration["contexts"]["trace"]["origin"] == "auto.grpc.grpc" assert ( - transaction_from_integration["contexts"]["trace"]["origin"] == "auto.grpc.grpc" - ) - assert ( - transaction_from_integration["spans"][0]["origin"] + root_span_from_integration["spans"][0]["origin"] == "auto.grpc.grpc.TestService.aio" ) # manually created in TestService, not the instrumentation - assert custom_transaction["contexts"]["trace"]["origin"] == "manual" - assert custom_transaction["spans"][0]["origin"] == "auto.grpc.grpc" + assert custom_root_span["contexts"]["trace"]["origin"] == "manual" + assert custom_root_span["spans"][0]["origin"] == "auto.grpc.grpc" class TestService(gRPCTestServiceServicer): diff --git a/tests/integrations/httpx/test_httpx.py b/tests/integrations/httpx/test_httpx.py index 5a35b68076..9e4b140f70 100644 --- a/tests/integrations/httpx/test_httpx.py +++ b/tests/integrations/httpx/test_httpx.py @@ -5,10 +5,10 @@ import pytest import sentry_sdk -from sentry_sdk import capture_message, start_transaction +from sentry_sdk import capture_message, start_span from sentry_sdk.consts import MATCH_ALL, SPANDATA from sentry_sdk.integrations.httpx import HttpxIntegration -from tests.conftest import ApproxDict +from tests.conftest import ApproxDict, SortedBaggage @pytest.mark.parametrize( @@ -26,7 +26,7 @@ def before_breadcrumb(crumb, hint): url = 
"http://example.com/" - with start_transaction(): + with start_span(): events = capture_events() if asyncio.iscoroutinefunction(httpx_client.get): @@ -64,8 +64,8 @@ def before_breadcrumb(crumb, hint): @pytest.mark.parametrize( "status_code,level", [ - (200, None), - (301, None), + (200, "info"), + (301, "info"), (403, "warning"), (405, "warning"), (500, "error"), @@ -80,7 +80,7 @@ def test_crumb_capture_client_error( url = "http://example.com/" - with start_transaction(): + with start_span(name="crumbs"): events = capture_events() if asyncio.iscoroutinefunction(httpx_client.get): @@ -98,12 +98,7 @@ def test_crumb_capture_client_error( crumb = event["breadcrumbs"]["values"][0] assert crumb["type"] == "http" assert crumb["category"] == "httplib" - - if level is None: - assert "level" not in crumb - else: - assert crumb["level"] == level - + assert crumb["level"] == level assert crumb["data"] == ApproxDict( { "url": url, @@ -119,7 +114,9 @@ def test_crumb_capture_client_error( "httpx_client", (httpx.Client(), httpx.AsyncClient()), ) -def test_outgoing_trace_headers(sentry_init, httpx_client, httpx_mock): +def test_outgoing_trace_headers( + sentry_init, httpx_client, capture_envelopes, httpx_mock +): httpx_mock.add_response() sentry_init( @@ -127,13 +124,14 @@ def test_outgoing_trace_headers(sentry_init, httpx_client, httpx_mock): integrations=[HttpxIntegration()], ) + envelopes = capture_envelopes() + url = "http://example.com/" - with start_transaction( + with start_span( name="/interactions/other-dogs/new-dog", op="greeting.sniff", - trace_id="01234567890123456789012345678901", - ) as transaction: + ): if asyncio.iscoroutinefunction(httpx_client.get): response = asyncio.get_event_loop().run_until_complete( httpx_client.get(url) @@ -141,14 +139,17 @@ def test_outgoing_trace_headers(sentry_init, httpx_client, httpx_mock): else: response = httpx_client.get(url) - request_span = transaction._span_recorder.spans[-1] - assert response.request.headers[ - "sentry-trace" - ] == "{trace_id}-{parent_span_id}-{sampled}".format( - trace_id=transaction.trace_id, - parent_span_id=request_span.span_id, - sampled=1, - ) + (envelope,) = envelopes + transaction = envelope.get_transaction_event() + request_span = transaction["spans"][-1] + + assert response.request.headers[ + "sentry-trace" + ] == "{trace_id}-{parent_span_id}-{sampled}".format( + trace_id=transaction["contexts"]["trace"]["trace_id"], + parent_span_id=request_span["span_id"], + sampled=1, + ) @pytest.mark.parametrize( @@ -158,6 +159,7 @@ def test_outgoing_trace_headers(sentry_init, httpx_client, httpx_mock): def test_outgoing_trace_headers_append_to_baggage( sentry_init, httpx_client, + capture_envelopes, httpx_mock, ): httpx_mock.add_response() @@ -168,15 +170,15 @@ def test_outgoing_trace_headers_append_to_baggage( release="d08ebdb9309e1b004c6f52202de58a09c2268e42", ) + envelopes = capture_envelopes() + url = "http://example.com/" - # patch random.uniform to return a predictable sample_rand value with mock.patch("sentry_sdk.tracing_utils.Random.uniform", return_value=0.5): - with start_transaction( + with start_span( name="/interactions/other-dogs/new-dog", op="greeting.sniff", - trace_id="01234567890123456789012345678901", - ) as transaction: + ): if asyncio.iscoroutinefunction(httpx_client.get): response = asyncio.get_event_loop().run_until_complete( httpx_client.get(url, headers={"baGGage": "custom=data"}) @@ -184,18 +186,21 @@ def test_outgoing_trace_headers_append_to_baggage( else: response = httpx_client.get(url, headers={"baGGage": 
"custom=data"}) - request_span = transaction._span_recorder.spans[-1] - assert response.request.headers[ - "sentry-trace" - ] == "{trace_id}-{parent_span_id}-{sampled}".format( - trace_id=transaction.trace_id, - parent_span_id=request_span.span_id, - sampled=1, - ) - assert ( - response.request.headers["baggage"] - == "custom=data,sentry-trace_id=01234567890123456789012345678901,sentry-sample_rand=0.500000,sentry-environment=production,sentry-release=d08ebdb9309e1b004c6f52202de58a09c2268e42,sentry-transaction=/interactions/other-dogs/new-dog,sentry-sample_rate=1.0,sentry-sampled=true" - ) + (envelope,) = envelopes + transaction = envelope.get_transaction_event() + request_span = transaction["spans"][-1] + trace_id = transaction["contexts"]["trace"]["trace_id"] + + assert response.request.headers[ + "sentry-trace" + ] == "{trace_id}-{parent_span_id}-{sampled}".format( + trace_id=trace_id, + parent_span_id=request_span["span_id"], + sampled=1, + ) + assert response.request.headers["baggage"] == SortedBaggage( + f"custom=data,sentry-trace_id={trace_id},sentry-sample_rand=0.500000,sentry-environment=production,sentry-release=d08ebdb9309e1b004c6f52202de58a09c2268e42,sentry-transaction=/interactions/other-dogs/new-dog,sentry-sample_rate=1.0,sentry-sampled=true" # noqa: E231 + ) @pytest.mark.parametrize( @@ -328,7 +333,7 @@ def test_option_trace_propagation_targets( integrations=[HttpxIntegration()], ) - with sentry_sdk.start_transaction(): # Must be in a transaction to propagate headers + with sentry_sdk.start_span(): # Must be in a root span to propagate headers if asyncio.iscoroutinefunction(httpx_client.get): asyncio.get_event_loop().run_until_complete(httpx_client.get(url)) else: @@ -342,7 +347,7 @@ def test_option_trace_propagation_targets( assert "sentry-trace" not in request_headers -def test_do_not_propagate_outside_transaction(sentry_init, httpx_mock): +def test_propagates_twp_outside_root_span(sentry_init, httpx_mock): httpx_mock.add_response() sentry_init( @@ -355,7 +360,8 @@ def test_do_not_propagate_outside_transaction(sentry_init, httpx_mock): httpx_client.get("http://example.com/") request_headers = httpx_mock.get_request().headers - assert "sentry-trace" not in request_headers + assert "sentry-trace" in request_headers + assert request_headers["sentry-trace"] == sentry_sdk.get_traceparent() @pytest.mark.tests_internal_exceptions @@ -408,7 +414,7 @@ def test_span_origin(sentry_init, capture_events, httpx_client, httpx_mock): url = "http://example.com/" - with start_transaction(name="test_transaction"): + with start_span(name="test_root_span"): if asyncio.iscoroutinefunction(httpx_client.get): asyncio.get_event_loop().run_until_complete(httpx_client.get(url)) else: diff --git a/tests/integrations/huey/test_huey.py b/tests/integrations/huey/test_huey.py index 143a369348..bdd5c2ca10 100644 --- a/tests/integrations/huey/test_huey.py +++ b/tests/integrations/huey/test_huey.py @@ -1,7 +1,7 @@ import pytest from decimal import DivisionByZero -from sentry_sdk import start_transaction +import sentry_sdk from sentry_sdk.integrations.huey import HueyIntegration from sentry_sdk.utils import parse_version @@ -160,7 +160,7 @@ def dummy_task(): events = capture_events() - with start_transaction() as transaction: + with sentry_sdk.start_span() as transaction: dummy_task() (event,) = events @@ -182,7 +182,7 @@ def test_huey_propagate_trace(init_huey, capture_events): def propagated_trace_task(): pass - with start_transaction() as outer_transaction: + with sentry_sdk.start_span() as 
outer_transaction: execute_huey_task(huey, propagated_trace_task) assert ( @@ -200,7 +200,7 @@ def dummy_task(): events = capture_events() - with start_transaction(): + with sentry_sdk.start_span(): dummy_task() (event,) = events diff --git a/tests/integrations/huggingface_hub/test_huggingface_hub.py b/tests/integrations/huggingface_hub/test_huggingface_hub.py index 090b0e4f3e..9a867e718b 100644 --- a/tests/integrations/huggingface_hub/test_huggingface_hub.py +++ b/tests/integrations/huggingface_hub/test_huggingface_hub.py @@ -7,7 +7,7 @@ ) from huggingface_hub.errors import OverloadedError -from sentry_sdk import start_transaction +from sentry_sdk import start_span from sentry_sdk.integrations.huggingface_hub import HuggingfaceHubIntegration @@ -51,7 +51,7 @@ def test_nonstreaming_chat_completion( ) mock_client_post(client, post_mock) - with start_transaction(name="huggingface_hub tx"): + with start_span(name="huggingface_hub tx"): response = client.text_generation( prompt="hello", details=details_arg, @@ -74,7 +74,7 @@ def test_nonstreaming_chat_completion( assert "ai.responses" not in span["data"] if details_arg: - assert span["measurements"]["ai_total_tokens_used"]["value"] == 10 + assert span["data"]["ai.total_tokens.used"] == 10 @pytest.mark.parametrize( @@ -106,7 +106,7 @@ def test_streaming_chat_completion( ) mock_client_post(client, post_mock) - with start_transaction(name="huggingface_hub tx"): + with start_span(name="huggingface_hub tx"): response = list( client.text_generation( prompt="hello", @@ -133,7 +133,7 @@ def test_streaming_chat_completion( assert "ai.responses" not in span["data"] if details_arg: - assert span["measurements"]["ai_total_tokens_used"]["value"] == 10 + assert span["data"]["ai.total_tokens.used"] == 10 def test_bad_chat_completion(sentry_init, capture_events): @@ -168,7 +168,7 @@ def test_span_origin(sentry_init, capture_events): ) mock_client_post(client, post_mock) - with start_transaction(name="huggingface_hub tx"): + with start_span(name="huggingface_hub tx"): list( client.text_generation( prompt="hello", diff --git a/tests/integrations/langchain/test_langchain.py b/tests/integrations/langchain/test_langchain.py index b9e5705b88..62f3eac04a 100644 --- a/tests/integrations/langchain/test_langchain.py +++ b/tests/integrations/langchain/test_langchain.py @@ -14,7 +14,7 @@ from langchain_core.messages import BaseMessage, AIMessageChunk from langchain_core.outputs import ChatGenerationChunk -from sentry_sdk import start_transaction +from sentry_sdk import start_span from sentry_sdk.integrations.langchain import LangchainIntegration from langchain.agents import tool, AgentExecutor, create_openai_tools_agent from langchain_core.prompts import ChatPromptTemplate, MessagesPlaceholder @@ -163,7 +163,7 @@ def test_langchain_agent( agent_executor = AgentExecutor(agent=agent, tools=[get_word_length], verbose=True) - with start_transaction(): + with start_span(name="agent"): list(agent_executor.stream({"input": "How many letters in the word eudca"})) tx = events[0] @@ -179,25 +179,20 @@ def test_langchain_agent( assert len(list(x for x in tx["spans"] if x["op"] == "ai.run.langchain")) > 0 if use_unknown_llm_type: - assert "ai_prompt_tokens_used" in chat_spans[0]["measurements"] - assert "ai_total_tokens_used" in chat_spans[0]["measurements"] + assert "ai.prompt_tokens.used" in chat_spans[0]["data"] + assert "ai.total_tokens.used" in chat_spans[0]["data"] else: # important: to avoid double counting, we do *not* measure # tokens used if we have an explicit integration 
(e.g. OpenAI) - assert "measurements" not in chat_spans[0] + assert "ai.prompt_tokens.used" not in chat_spans[0]["data"] + assert "ai.total_tokens.used" not in chat_spans[0]["data"] if send_default_pii and include_prompts: - assert ( - "You are very powerful" - in chat_spans[0]["data"]["ai.input_messages"][0]["content"] - ) + assert "You are very powerful" in chat_spans[0]["data"]["ai.input_messages"] assert "5" in chat_spans[0]["data"]["ai.responses"] assert "word" in tool_exec_span["data"]["ai.input_messages"] assert 5 == int(tool_exec_span["data"]["ai.responses"]) - assert ( - "You are very powerful" - in chat_spans[1]["data"]["ai.input_messages"][0]["content"] - ) + assert "You are very powerful" in chat_spans[1]["data"]["ai.input_messages"] assert "5" in chat_spans[1]["data"]["ai.responses"] else: assert "ai.input_messages" not in chat_spans[0].get("data", {}) @@ -237,7 +232,7 @@ def test_langchain_error(sentry_init, capture_events): agent_executor = AgentExecutor(agent=agent, tools=[get_word_length], verbose=True) - with start_transaction(), pytest.raises(Exception): + with start_span(name="agent"), pytest.raises(Exception): list(agent_executor.stream({"input": "How many letters in the word eudca"})) error = events[0] @@ -332,7 +327,7 @@ def test_span_origin(sentry_init, capture_events): agent_executor = AgentExecutor(agent=agent, tools=[get_word_length], verbose=True) - with start_transaction(): + with start_span(name="agent"): list(agent_executor.stream({"input": "How many letters in the word eudca"})) (event,) = events diff --git a/tests/integrations/litestar/test_litestar.py b/tests/integrations/litestar/test_litestar.py index 4f642479e4..c3160a9169 100644 --- a/tests/integrations/litestar/test_litestar.py +++ b/tests/integrations/litestar/test_litestar.py @@ -6,6 +6,7 @@ from sentry_sdk import capture_message from sentry_sdk.integrations.litestar import LitestarIntegration +from tests.conftest import ApproxDict from typing import Any @@ -205,7 +206,7 @@ def is_matching_span(expected_span, actual_span): return ( expected_span["op"] == actual_span["op"] and expected_span["description"] == actual_span["description"] - and expected_span["tags"] == actual_span["tags"] + and ApproxDict(expected_span["tags"]) == actual_span["tags"] ) actual_litestar_spans = list( @@ -301,7 +302,7 @@ def is_matching_span(expected_span, actual_span): return ( expected_span["op"] == actual_span["op"] and actual_span["description"].startswith(expected_span["description"]) - and expected_span["tags"] == actual_span["tags"] + and ApproxDict(expected_span["tags"]) == actual_span["tags"] ) actual_litestar_spans = list( diff --git a/tests/integrations/logging/test_logging.py b/tests/integrations/logging/test_logging.py index c08e960c00..e54fd829f1 100644 --- a/tests/integrations/logging/test_logging.py +++ b/tests/integrations/logging/test_logging.py @@ -15,43 +15,72 @@ def reset_level(): logger.setLevel(logging.DEBUG) -@pytest.mark.parametrize("logger", [logger, other_logger]) -def test_logging_works_with_many_loggers(sentry_init, capture_events, logger): - sentry_init(integrations=[LoggingIntegration(event_level="ERROR")]) +@pytest.mark.parametrize("integrations", [None, [], [LoggingIntegration()]]) +@pytest.mark.parametrize( + "kwargs", [{"exc_info": None}, {}, {"exc_info": 0}, {"exc_info": False}] +) +def test_logging_defaults(integrations, sentry_init, capture_events, kwargs): + sentry_init(integrations=integrations) events = capture_events() logger.info("bread") - logger.critical("LOL") - (event,) = 
events - assert event["level"] == "fatal" - assert not event["logentry"]["params"] - assert event["logentry"]["message"] == "LOL" - assert event["logentry"]["formatted"] == "LOL" - assert any(crumb["message"] == "bread" for crumb in event["breadcrumbs"]["values"]) + logger.error("error") + logger.critical("LOL", **kwargs) + + assert len(events) == 0 -@pytest.mark.parametrize("integrations", [None, [], [LoggingIntegration()]]) @pytest.mark.parametrize( "kwargs", [{"exc_info": None}, {}, {"exc_info": 0}, {"exc_info": False}] ) -def test_logging_defaults(integrations, sentry_init, capture_events, kwargs): - sentry_init(integrations=integrations) +def test_logging_basic(sentry_init, capture_events, kwargs): + sentry_init(integrations=[LoggingIntegration(event_level=logging.ERROR)]) events = capture_events() logger.info("bread") + logger.error("error") logger.critical("LOL", **kwargs) - (event,) = events + (error_event, critical_event) = events - assert event["level"] == "fatal" - assert any(crumb["message"] == "bread" for crumb in event["breadcrumbs"]["values"]) + assert error_event["level"] == "error" + assert any( + crumb["message"] == "bread" for crumb in error_event["breadcrumbs"]["values"] + ) + assert not any( + crumb["message"] == "LOL" for crumb in error_event["breadcrumbs"]["values"] + ) + assert "threads" not in error_event + + assert critical_event["level"] == "fatal" + assert any( + crumb["message"] == "bread" for crumb in critical_event["breadcrumbs"]["values"] + ) assert not any( - crumb["message"] == "LOL" for crumb in event["breadcrumbs"]["values"] + crumb["message"] == "LOL" for crumb in critical_event["breadcrumbs"]["values"] ) - assert "threads" not in event + assert "threads" not in critical_event + + +@pytest.mark.parametrize("logger", [logger, other_logger]) +def test_logging_works_with_many_loggers(sentry_init, capture_events, logger): + sentry_init(integrations=[LoggingIntegration(event_level="ERROR")]) + events = capture_events() + + logger.info("bread") + logger.critical("LOL") + (event,) = events + assert event["level"] == "fatal" + assert not event["logentry"]["params"] + assert event["logentry"]["message"] == "LOL" + assert event["logentry"]["formatted"] == "LOL" + assert any(crumb["message"] == "bread" for crumb in event["breadcrumbs"]["values"]) def test_logging_extra_data(sentry_init, capture_events): - sentry_init(integrations=[LoggingIntegration()], default_integrations=False) + sentry_init( + integrations=[LoggingIntegration(event_level=logging.ERROR)], + default_integrations=False, + ) events = capture_events() logger.info("bread", extra=dict(foo=42)) @@ -68,7 +97,10 @@ def test_logging_extra_data(sentry_init, capture_events): def test_logging_extra_data_integer_keys(sentry_init, capture_events): - sentry_init(integrations=[LoggingIntegration()], default_integrations=False) + sentry_init( + integrations=[LoggingIntegration(event_level=logging.ERROR)], + default_integrations=False, + ) events = capture_events() logger.critical("integer in extra keys", extra={1: 1}) @@ -86,7 +118,10 @@ def test_logging_extra_data_integer_keys(sentry_init, capture_events): ), ) def test_logging_stack_trace(sentry_init, capture_events, enable_stack_trace_kwarg): - sentry_init(integrations=[LoggingIntegration()], default_integrations=False) + sentry_init( + integrations=[LoggingIntegration(event_level=logging.ERROR)], + default_integrations=False, + ) events = capture_events() logger.error("first", **enable_stack_trace_kwarg) @@ -105,7 +140,10 @@ def 
test_logging_stack_trace(sentry_init, capture_events, enable_stack_trace_kwa def test_logging_level(sentry_init, capture_events): - sentry_init(integrations=[LoggingIntegration()], default_integrations=False) + sentry_init( + integrations=[LoggingIntegration(event_level=logging.ERROR)], + default_integrations=False, + ) events = capture_events() logger.setLevel(logging.WARNING) @@ -161,7 +199,10 @@ def test_custom_log_level_names(sentry_init, capture_events): def test_logging_filters(sentry_init, capture_events): - sentry_init(integrations=[LoggingIntegration()], default_integrations=False) + sentry_init( + integrations=[LoggingIntegration(event_level=logging.ERROR)], + default_integrations=False, + ) events = capture_events() should_log = False @@ -220,7 +261,10 @@ def test_logging_captured_warnings(sentry_init, capture_events, recwarn): def test_ignore_logger(sentry_init, capture_events): - sentry_init(integrations=[LoggingIntegration()], default_integrations=False) + sentry_init( + integrations=[LoggingIntegration(event_level=logging.ERROR)], + default_integrations=False, + ) events = capture_events() ignore_logger("testfoo") @@ -231,7 +275,10 @@ def test_ignore_logger(sentry_init, capture_events): def test_ignore_logger_wildcard(sentry_init, capture_events): - sentry_init(integrations=[LoggingIntegration()], default_integrations=False) + sentry_init( + integrations=[LoggingIntegration(event_level=logging.ERROR)], + default_integrations=False, + ) events = capture_events() ignore_logger("testfoo.*") @@ -249,7 +296,10 @@ def test_ignore_logger_wildcard(sentry_init, capture_events): def test_logging_dictionary_interpolation(sentry_init, capture_events): """Here we test an entire dictionary being interpolated into the log message.""" - sentry_init(integrations=[LoggingIntegration()], default_integrations=False) + sentry_init( + integrations=[LoggingIntegration(event_level=logging.ERROR)], + default_integrations=False, + ) events = capture_events() logger.error("this is a log with a dictionary %s", {"foo": "bar"}) @@ -265,7 +315,10 @@ def test_logging_dictionary_interpolation(sentry_init, capture_events): def test_logging_dictionary_args(sentry_init, capture_events): """Here we test items from a dictionary being interpolated into the log message.""" - sentry_init(integrations=[LoggingIntegration()], default_integrations=False) + sentry_init( + integrations=[LoggingIntegration(event_level=logging.ERROR)], + default_integrations=False, + ) events = capture_events() logger.error( diff --git a/tests/integrations/openai/test_openai.py b/tests/integrations/openai/test_openai.py index 011192e49f..85ff95f377 100644 --- a/tests/integrations/openai/test_openai.py +++ b/tests/integrations/openai/test_openai.py @@ -6,7 +6,7 @@ from openai.types.chat.chat_completion_chunk import ChoiceDelta, Choice as DeltaChoice from openai.types.create_embedding_response import Usage as EmbeddingTokenUsage -from sentry_sdk import start_transaction +from sentry_sdk import start_span from sentry_sdk.integrations.openai import ( OpenAIIntegration, _calculate_chat_completion_usage, @@ -67,7 +67,7 @@ def test_nonstreaming_chat_completion( client = OpenAI(api_key="z") client.chat.completions._post = mock.Mock(return_value=EXAMPLE_CHAT_COMPLETION) - with start_transaction(name="openai tx"): + with start_span(name="openai tx"): response = ( client.chat.completions.create( model="some-model", messages=[{"role": "system", "content": "hello"}] @@ -83,15 +83,15 @@ def test_nonstreaming_chat_completion( assert span["op"] == 
"ai.chat_completions.create.openai" if send_default_pii and include_prompts: - assert "hello" in span["data"]["ai.input_messages"]["content"] - assert "the model response" in span["data"]["ai.responses"]["content"] + assert '"content": "hello"' in span["data"]["ai.input_messages"] + assert '"content": "the model response"' in span["data"]["ai.responses"] else: assert "ai.input_messages" not in span["data"] assert "ai.responses" not in span["data"] - assert span["measurements"]["ai_completion_tokens_used"]["value"] == 10 - assert span["measurements"]["ai_prompt_tokens_used"]["value"] == 20 - assert span["measurements"]["ai_total_tokens_used"]["value"] == 30 + assert span["data"]["ai.completion_tokens.used"] == 10 + assert span["data"]["ai.prompt_tokens.used"] == 20 + assert span["data"]["ai.total_tokens.used"] == 30 @pytest.mark.asyncio @@ -112,7 +112,7 @@ async def test_nonstreaming_chat_completion_async( client = AsyncOpenAI(api_key="z") client.chat.completions._post = AsyncMock(return_value=EXAMPLE_CHAT_COMPLETION) - with start_transaction(name="openai tx"): + with start_span(name="openai tx"): response = await client.chat.completions.create( model="some-model", messages=[{"role": "system", "content": "hello"}] ) @@ -125,15 +125,15 @@ async def test_nonstreaming_chat_completion_async( assert span["op"] == "ai.chat_completions.create.openai" if send_default_pii and include_prompts: - assert "hello" in span["data"]["ai.input_messages"]["content"] - assert "the model response" in span["data"]["ai.responses"]["content"] + assert '"content": "hello"' in span["data"]["ai.input_messages"] + assert '"content": "the model response"' in span["data"]["ai.responses"] else: assert "ai.input_messages" not in span["data"] assert "ai.responses" not in span["data"] - assert span["measurements"]["ai_completion_tokens_used"]["value"] == 10 - assert span["measurements"]["ai_prompt_tokens_used"]["value"] == 20 - assert span["measurements"]["ai_total_tokens_used"]["value"] == 30 + assert span["data"]["ai.completion_tokens.used"] == 10 + assert span["data"]["ai.prompt_tokens.used"] == 20 + assert span["data"]["ai.total_tokens.used"] == 30 def tiktoken_encoding_if_installed(): @@ -204,7 +204,7 @@ def test_streaming_chat_completion( ] client.chat.completions._post = mock.Mock(return_value=returned_stream) - with start_transaction(name="openai tx"): + with start_span(name="openai tx"): response_stream = client.chat.completions.create( model="some-model", messages=[{"role": "system", "content": "hello"}] ) @@ -218,7 +218,7 @@ def test_streaming_chat_completion( assert span["op"] == "ai.chat_completions.create.openai" if send_default_pii and include_prompts: - assert "hello" in span["data"]["ai.input_messages"]["content"] + assert '"content": "hello"' in span["data"]["ai.input_messages"] assert "hello world" in span["data"]["ai.responses"] else: assert "ai.input_messages" not in span["data"] @@ -227,9 +227,9 @@ def test_streaming_chat_completion( try: import tiktoken # type: ignore # noqa # pylint: disable=unused-import - assert span["measurements"]["ai_completion_tokens_used"]["value"] == 2 - assert span["measurements"]["ai_prompt_tokens_used"]["value"] == 1 - assert span["measurements"]["ai_total_tokens_used"]["value"] == 3 + assert span["data"]["ai.completion_tokens.used"] == 2 + assert span["data"]["ai.prompt_tokens.used"] == 1 + assert span["data"]["ai.total_tokens.used"] == 3 except ImportError: pass # if tiktoken is not installed, we can't guarantee token usage will be calculated properly @@ -298,7 +298,7 @@ 
async def test_streaming_chat_completion_async( ) client.chat.completions._post = AsyncMock(return_value=returned_stream) - with start_transaction(name="openai tx"): + with start_span(name="openai tx"): response_stream = await client.chat.completions.create( model="some-model", messages=[{"role": "system", "content": "hello"}] ) @@ -314,7 +314,7 @@ async def test_streaming_chat_completion_async( assert span["op"] == "ai.chat_completions.create.openai" if send_default_pii and include_prompts: - assert "hello" in span["data"]["ai.input_messages"]["content"] + assert '"content": "hello"' in span["data"]["ai.input_messages"] assert "hello world" in span["data"]["ai.responses"] else: assert "ai.input_messages" not in span["data"] @@ -323,13 +323,14 @@ async def test_streaming_chat_completion_async( try: import tiktoken # type: ignore # noqa # pylint: disable=unused-import - assert span["measurements"]["ai_completion_tokens_used"]["value"] == 2 - assert span["measurements"]["ai_prompt_tokens_used"]["value"] == 1 - assert span["measurements"]["ai_total_tokens_used"]["value"] == 3 + assert span["data"]["ai.completion_tokens.used"] == 2 + assert span["data"]["ai.prompt_tokens.used"] == 1 + assert span["data"]["ai.total_tokens.used"] == 3 except ImportError: pass # if tiktoken is not installed, we can't guarantee token usage will be calculated properly +@pytest.mark.forked def test_bad_chat_completion(sentry_init, capture_events): sentry_init(integrations=[OpenAIIntegration()], traces_sample_rate=1.0) events = capture_events() @@ -392,7 +393,7 @@ def test_embeddings_create( ) client.embeddings._post = mock.Mock(return_value=returned_embedding) - with start_transaction(name="openai tx"): + with start_span(name="openai tx"): response = client.embeddings.create( input="hello", model="text-embedding-3-large" ) @@ -408,8 +409,8 @@ def test_embeddings_create( else: assert "ai.input_messages" not in span["data"] - assert span["measurements"]["ai_prompt_tokens_used"]["value"] == 20 - assert span["measurements"]["ai_total_tokens_used"]["value"] == 30 + assert span["data"]["ai.prompt_tokens.used"] == 20 + assert span["data"]["ai.total_tokens.used"] == 30 @pytest.mark.asyncio @@ -440,7 +441,7 @@ async def test_embeddings_create_async( ) client.embeddings._post = AsyncMock(return_value=returned_embedding) - with start_transaction(name="openai tx"): + with start_span(name="openai tx"): response = await client.embeddings.create( input="hello", model="text-embedding-3-large" ) @@ -456,10 +457,11 @@ async def test_embeddings_create_async( else: assert "ai.input_messages" not in span["data"] - assert span["measurements"]["ai_prompt_tokens_used"]["value"] == 20 - assert span["measurements"]["ai_total_tokens_used"]["value"] == 30 + assert span["data"]["ai.prompt_tokens.used"] == 20 + assert span["data"]["ai.total_tokens.used"] == 30 +@pytest.mark.forked @pytest.mark.parametrize( "send_default_pii, include_prompts", [(True, True), (True, False), (False, True), (False, False)], @@ -487,6 +489,7 @@ def test_embeddings_create_raises_error( assert event["level"] == "error" +@pytest.mark.forked @pytest.mark.asyncio @pytest.mark.parametrize( "send_default_pii, include_prompts", @@ -525,7 +528,7 @@ def test_span_origin_nonstreaming_chat(sentry_init, capture_events): client = OpenAI(api_key="z") client.chat.completions._post = mock.Mock(return_value=EXAMPLE_CHAT_COMPLETION) - with start_transaction(name="openai tx"): + with start_span(name="openai tx"): client.chat.completions.create( model="some-model", messages=[{"role": 
"system", "content": "hello"}] ) @@ -547,7 +550,7 @@ async def test_span_origin_nonstreaming_chat_async(sentry_init, capture_events): client = AsyncOpenAI(api_key="z") client.chat.completions._post = AsyncMock(return_value=EXAMPLE_CHAT_COMPLETION) - with start_transaction(name="openai tx"): + with start_span(name="openai tx"): await client.chat.completions.create( model="some-model", messages=[{"role": "system", "content": "hello"}] ) @@ -604,7 +607,7 @@ def test_span_origin_streaming_chat(sentry_init, capture_events): ] client.chat.completions._post = mock.Mock(return_value=returned_stream) - with start_transaction(name="openai tx"): + with start_span(name="openai tx"): response_stream = client.chat.completions.create( model="some-model", messages=[{"role": "system", "content": "hello"}] ) @@ -668,7 +671,7 @@ async def test_span_origin_streaming_chat_async(sentry_init, capture_events): ) client.chat.completions._post = AsyncMock(return_value=returned_stream) - with start_transaction(name="openai tx"): + with start_span(name="openai tx"): response_stream = await client.chat.completions.create( model="some-model", messages=[{"role": "system", "content": "hello"}] ) @@ -703,7 +706,7 @@ def test_span_origin_embeddings(sentry_init, capture_events): ) client.embeddings._post = mock.Mock(return_value=returned_embedding) - with start_transaction(name="openai tx"): + with start_span(name="openai tx"): client.embeddings.create(input="hello", model="text-embedding-3-large") (event,) = events @@ -733,7 +736,7 @@ async def test_span_origin_embeddings_async(sentry_init, capture_events): ) client.embeddings._post = AsyncMock(return_value=returned_embedding) - with start_transaction(name="openai tx"): + with start_span(name="openai tx"): await client.embeddings.create(input="hello", model="text-embedding-3-large") (event,) = events diff --git a/tests/integrations/opentelemetry/test_experimental.py b/tests/integrations/opentelemetry/test_experimental.py deleted file mode 100644 index 8e4b703361..0000000000 --- a/tests/integrations/opentelemetry/test_experimental.py +++ /dev/null @@ -1,47 +0,0 @@ -from unittest.mock import MagicMock, patch - -import pytest - - -@pytest.mark.forked -def test_integration_enabled_if_option_is_on(sentry_init, reset_integrations): - mocked_setup_once = MagicMock() - - with patch( - "sentry_sdk.integrations.opentelemetry.integration.OpenTelemetryIntegration.setup_once", - mocked_setup_once, - ): - sentry_init( - _experiments={ - "otel_powered_performance": True, - }, - ) - mocked_setup_once.assert_called_once() - - -@pytest.mark.forked -def test_integration_not_enabled_if_option_is_off(sentry_init, reset_integrations): - mocked_setup_once = MagicMock() - - with patch( - "sentry_sdk.integrations.opentelemetry.integration.OpenTelemetryIntegration.setup_once", - mocked_setup_once, - ): - sentry_init( - _experiments={ - "otel_powered_performance": False, - }, - ) - mocked_setup_once.assert_not_called() - - -@pytest.mark.forked -def test_integration_not_enabled_if_option_is_missing(sentry_init, reset_integrations): - mocked_setup_once = MagicMock() - - with patch( - "sentry_sdk.integrations.opentelemetry.integration.OpenTelemetryIntegration.setup_once", - mocked_setup_once, - ): - sentry_init() - mocked_setup_once.assert_not_called() diff --git a/tests/integrations/opentelemetry/test_propagator.py b/tests/integrations/opentelemetry/test_propagator.py deleted file mode 100644 index d999b0bb2b..0000000000 --- a/tests/integrations/opentelemetry/test_propagator.py +++ /dev/null @@ -1,300 
+0,0 @@ -import pytest - -from unittest import mock -from unittest.mock import MagicMock - -from opentelemetry.context import get_current -from opentelemetry.trace import ( - SpanContext, - TraceFlags, - set_span_in_context, -) -from opentelemetry.trace.propagation import get_current_span - -from sentry_sdk.integrations.opentelemetry.consts import ( - SENTRY_BAGGAGE_KEY, - SENTRY_TRACE_KEY, -) -from sentry_sdk.integrations.opentelemetry.propagator import SentryPropagator -from sentry_sdk.integrations.opentelemetry.span_processor import SentrySpanProcessor -from sentry_sdk.tracing_utils import Baggage - - -@pytest.mark.forked -def test_extract_no_context_no_sentry_trace_header(): - """ - No context and NO Sentry trace data in getter. - Extract should return empty context. - """ - carrier = None - context = None - getter = MagicMock() - getter.get.return_value = None - - modified_context = SentryPropagator().extract(carrier, context, getter) - - assert modified_context == {} - - -@pytest.mark.forked -def test_extract_context_no_sentry_trace_header(): - """ - Context but NO Sentry trace data in getter. - Extract should return context as is. - """ - carrier = None - context = {"some": "value"} - getter = MagicMock() - getter.get.return_value = None - - modified_context = SentryPropagator().extract(carrier, context, getter) - - assert modified_context == context - - -@pytest.mark.forked -def test_extract_empty_context_sentry_trace_header_no_baggage(): - """ - Empty context but Sentry trace data but NO Baggage in getter. - Extract should return context that has empty baggage in it and also a NoopSpan with span_id and trace_id. - """ - carrier = None - context = {} - getter = MagicMock() - getter.get.side_effect = [ - ["1234567890abcdef1234567890abcdef-1234567890abcdef-1"], - None, - ] - - modified_context = SentryPropagator().extract(carrier, context, getter) - - assert len(modified_context.keys()) == 3 - - assert modified_context[SENTRY_TRACE_KEY] == { - "trace_id": "1234567890abcdef1234567890abcdef", - "parent_span_id": "1234567890abcdef", - "parent_sampled": True, - } - assert modified_context[SENTRY_BAGGAGE_KEY].serialize() == "" - - span_context = get_current_span(modified_context).get_span_context() - assert span_context.span_id == int("1234567890abcdef", 16) - assert span_context.trace_id == int("1234567890abcdef1234567890abcdef", 16) - - -@pytest.mark.forked -def test_extract_context_sentry_trace_header_baggage(): - """ - Empty context but Sentry trace data and Baggage in getter. - Extract should return context that has baggage in it and also a NoopSpan with span_id and trace_id. 
- """ - baggage_header = ( - "other-vendor-value-1=foo;bar;baz, sentry-trace_id=771a43a4192642f0b136d5159a501700, " - "sentry-public_key=49d0f7386ad645858ae85020e393bef3, sentry-sample_rate=0.01337, " - "sentry-user_id=Am%C3%A9lie, other-vendor-value-2=foo;bar;" - ) - - carrier = None - context = {"some": "value"} - getter = MagicMock() - getter.get.side_effect = [ - ["1234567890abcdef1234567890abcdef-1234567890abcdef-1"], - [baggage_header], - ] - - modified_context = SentryPropagator().extract(carrier, context, getter) - - assert len(modified_context.keys()) == 4 - - assert modified_context[SENTRY_TRACE_KEY] == { - "trace_id": "1234567890abcdef1234567890abcdef", - "parent_span_id": "1234567890abcdef", - "parent_sampled": True, - } - - assert modified_context[SENTRY_BAGGAGE_KEY].serialize() == ( - "sentry-trace_id=771a43a4192642f0b136d5159a501700," - "sentry-public_key=49d0f7386ad645858ae85020e393bef3," - "sentry-sample_rate=0.01337,sentry-user_id=Am%C3%A9lie" - ) - - span_context = get_current_span(modified_context).get_span_context() - assert span_context.span_id == int("1234567890abcdef", 16) - assert span_context.trace_id == int("1234567890abcdef1234567890abcdef", 16) - - -@pytest.mark.forked -def test_inject_empty_otel_span_map(): - """ - Empty otel_span_map. - So there is no sentry_span to be found in inject() - and the function is returned early and no setters are called. - """ - carrier = None - context = get_current() - setter = MagicMock() - setter.set = MagicMock() - - span_context = SpanContext( - trace_id=int("1234567890abcdef1234567890abcdef", 16), - span_id=int("1234567890abcdef", 16), - trace_flags=TraceFlags(TraceFlags.SAMPLED), - is_remote=True, - ) - span = MagicMock() - span.get_span_context.return_value = span_context - - with mock.patch( - "sentry_sdk.integrations.opentelemetry.propagator.trace.get_current_span", - return_value=span, - ): - full_context = set_span_in_context(span, context) - SentryPropagator().inject(carrier, full_context, setter) - - setter.set.assert_not_called() - - -@pytest.mark.forked -def test_inject_sentry_span_no_baggage(): - """ - Inject a sentry span with no baggage. - """ - carrier = None - context = get_current() - setter = MagicMock() - setter.set = MagicMock() - - trace_id = "1234567890abcdef1234567890abcdef" - span_id = "1234567890abcdef" - - span_context = SpanContext( - trace_id=int(trace_id, 16), - span_id=int(span_id, 16), - trace_flags=TraceFlags(TraceFlags.SAMPLED), - is_remote=True, - ) - span = MagicMock() - span.get_span_context.return_value = span_context - - sentry_span = MagicMock() - sentry_span.to_traceparent = mock.Mock( - return_value="1234567890abcdef1234567890abcdef-1234567890abcdef-1" - ) - sentry_span.containing_transaction.get_baggage = mock.Mock(return_value=None) - - span_processor = SentrySpanProcessor() - span_processor.otel_span_map[span_id] = sentry_span - - with mock.patch( - "sentry_sdk.integrations.opentelemetry.propagator.trace.get_current_span", - return_value=span, - ): - full_context = set_span_in_context(span, context) - SentryPropagator().inject(carrier, full_context, setter) - - setter.set.assert_called_once_with( - carrier, - "sentry-trace", - "1234567890abcdef1234567890abcdef-1234567890abcdef-1", - ) - - -def test_inject_sentry_span_empty_baggage(): - """ - Inject a sentry span with no baggage. 
- """ - carrier = None - context = get_current() - setter = MagicMock() - setter.set = MagicMock() - - trace_id = "1234567890abcdef1234567890abcdef" - span_id = "1234567890abcdef" - - span_context = SpanContext( - trace_id=int(trace_id, 16), - span_id=int(span_id, 16), - trace_flags=TraceFlags(TraceFlags.SAMPLED), - is_remote=True, - ) - span = MagicMock() - span.get_span_context.return_value = span_context - - sentry_span = MagicMock() - sentry_span.to_traceparent = mock.Mock( - return_value="1234567890abcdef1234567890abcdef-1234567890abcdef-1" - ) - sentry_span.containing_transaction.get_baggage = mock.Mock(return_value=Baggage({})) - - span_processor = SentrySpanProcessor() - span_processor.otel_span_map[span_id] = sentry_span - - with mock.patch( - "sentry_sdk.integrations.opentelemetry.propagator.trace.get_current_span", - return_value=span, - ): - full_context = set_span_in_context(span, context) - SentryPropagator().inject(carrier, full_context, setter) - - setter.set.assert_called_once_with( - carrier, - "sentry-trace", - "1234567890abcdef1234567890abcdef-1234567890abcdef-1", - ) - - -def test_inject_sentry_span_baggage(): - """ - Inject a sentry span with baggage. - """ - carrier = None - context = get_current() - setter = MagicMock() - setter.set = MagicMock() - - trace_id = "1234567890abcdef1234567890abcdef" - span_id = "1234567890abcdef" - - span_context = SpanContext( - trace_id=int(trace_id, 16), - span_id=int(span_id, 16), - trace_flags=TraceFlags(TraceFlags.SAMPLED), - is_remote=True, - ) - span = MagicMock() - span.get_span_context.return_value = span_context - - sentry_span = MagicMock() - sentry_span.to_traceparent = mock.Mock( - return_value="1234567890abcdef1234567890abcdef-1234567890abcdef-1" - ) - sentry_items = { - "sentry-trace_id": "771a43a4192642f0b136d5159a501700", - "sentry-public_key": "49d0f7386ad645858ae85020e393bef3", - "sentry-sample_rate": 0.01337, - "sentry-user_id": "Amélie", - } - baggage = Baggage(sentry_items=sentry_items) - sentry_span.containing_transaction.get_baggage = MagicMock(return_value=baggage) - - span_processor = SentrySpanProcessor() - span_processor.otel_span_map[span_id] = sentry_span - - with mock.patch( - "sentry_sdk.integrations.opentelemetry.propagator.trace.get_current_span", - return_value=span, - ): - full_context = set_span_in_context(span, context) - SentryPropagator().inject(carrier, full_context, setter) - - setter.set.assert_any_call( - carrier, - "sentry-trace", - "1234567890abcdef1234567890abcdef-1234567890abcdef-1", - ) - - setter.set.assert_any_call( - carrier, - "baggage", - baggage.serialize(), - ) diff --git a/tests/integrations/opentelemetry/test_span_processor.py b/tests/integrations/opentelemetry/test_span_processor.py deleted file mode 100644 index ec5cf6af23..0000000000 --- a/tests/integrations/opentelemetry/test_span_processor.py +++ /dev/null @@ -1,608 +0,0 @@ -import time -from datetime import datetime, timezone -from unittest import mock -from unittest.mock import MagicMock - -import pytest -from opentelemetry.trace import SpanKind, SpanContext, Status, StatusCode - -import sentry_sdk -from sentry_sdk.integrations.opentelemetry.span_processor import ( - SentrySpanProcessor, - link_trace_context_to_error_event, -) -from sentry_sdk.tracing import Span, Transaction -from sentry_sdk.tracing_utils import extract_sentrytrace_data - - -def test_is_sentry_span(): - otel_span = MagicMock() - - span_processor = SentrySpanProcessor() - assert not span_processor._is_sentry_span(otel_span) - - client = MagicMock() - 
client.options = {"instrumenter": "otel"} - client.dsn = "https://1234567890abcdef@o123456.ingest.sentry.io/123456" - sentry_sdk.get_global_scope().set_client(client) - - assert not span_processor._is_sentry_span(otel_span) - - otel_span.attributes = { - "http.url": "https://example.com", - } - assert not span_processor._is_sentry_span(otel_span) - - otel_span.attributes = { - "http.url": "https://o123456.ingest.sentry.io/api/123/envelope", - } - assert span_processor._is_sentry_span(otel_span) - - -def test_get_otel_context(): - otel_span = MagicMock() - otel_span.attributes = {"foo": "bar"} - otel_span.resource = MagicMock() - otel_span.resource.attributes = {"baz": "qux"} - - span_processor = SentrySpanProcessor() - otel_context = span_processor._get_otel_context(otel_span) - - assert otel_context == { - "attributes": {"foo": "bar"}, - "resource": {"baz": "qux"}, - } - - -def test_get_trace_data_with_span_and_trace(): - otel_span = MagicMock() - span_context = SpanContext( - trace_id=int("1234567890abcdef1234567890abcdef", 16), - span_id=int("1234567890abcdef", 16), - is_remote=True, - ) - otel_span.get_span_context.return_value = span_context - otel_span.parent = None - - parent_context = {} - - span_processor = SentrySpanProcessor() - sentry_trace_data = span_processor._get_trace_data(otel_span, parent_context) - assert sentry_trace_data["trace_id"] == "1234567890abcdef1234567890abcdef" - assert sentry_trace_data["span_id"] == "1234567890abcdef" - assert sentry_trace_data["parent_span_id"] is None - assert sentry_trace_data["parent_sampled"] is None - assert sentry_trace_data["baggage"] is None - - -def test_get_trace_data_with_span_and_trace_and_parent(): - otel_span = MagicMock() - span_context = SpanContext( - trace_id=int("1234567890abcdef1234567890abcdef", 16), - span_id=int("1234567890abcdef", 16), - is_remote=True, - ) - otel_span.get_span_context.return_value = span_context - otel_span.parent = MagicMock() - otel_span.parent.span_id = int("abcdef1234567890", 16) - - parent_context = {} - - span_processor = SentrySpanProcessor() - sentry_trace_data = span_processor._get_trace_data(otel_span, parent_context) - assert sentry_trace_data["trace_id"] == "1234567890abcdef1234567890abcdef" - assert sentry_trace_data["span_id"] == "1234567890abcdef" - assert sentry_trace_data["parent_span_id"] == "abcdef1234567890" - assert sentry_trace_data["parent_sampled"] is None - assert sentry_trace_data["baggage"] is None - - -def test_get_trace_data_with_sentry_trace(): - otel_span = MagicMock() - span_context = SpanContext( - trace_id=int("1234567890abcdef1234567890abcdef", 16), - span_id=int("1234567890abcdef", 16), - is_remote=True, - ) - otel_span.get_span_context.return_value = span_context - otel_span.parent = MagicMock() - otel_span.parent.span_id = int("abcdef1234567890", 16) - - parent_context = {} - - with mock.patch( - "sentry_sdk.integrations.opentelemetry.span_processor.get_value", - side_effect=[ - extract_sentrytrace_data( - "1234567890abcdef1234567890abcdef-1234567890abcdef-1" - ), - None, - ], - ): - span_processor = SentrySpanProcessor() - sentry_trace_data = span_processor._get_trace_data(otel_span, parent_context) - assert sentry_trace_data["trace_id"] == "1234567890abcdef1234567890abcdef" - assert sentry_trace_data["span_id"] == "1234567890abcdef" - assert sentry_trace_data["parent_span_id"] == "abcdef1234567890" - assert sentry_trace_data["parent_sampled"] is True - assert sentry_trace_data["baggage"] is None - - with mock.patch( - 
"sentry_sdk.integrations.opentelemetry.span_processor.get_value", - side_effect=[ - extract_sentrytrace_data( - "1234567890abcdef1234567890abcdef-1234567890abcdef-0" - ), - None, - ], - ): - span_processor = SentrySpanProcessor() - sentry_trace_data = span_processor._get_trace_data(otel_span, parent_context) - assert sentry_trace_data["trace_id"] == "1234567890abcdef1234567890abcdef" - assert sentry_trace_data["span_id"] == "1234567890abcdef" - assert sentry_trace_data["parent_span_id"] == "abcdef1234567890" - assert sentry_trace_data["parent_sampled"] is False - assert sentry_trace_data["baggage"] is None - - -def test_get_trace_data_with_sentry_trace_and_baggage(): - otel_span = MagicMock() - span_context = SpanContext( - trace_id=int("1234567890abcdef1234567890abcdef", 16), - span_id=int("1234567890abcdef", 16), - is_remote=True, - ) - otel_span.get_span_context.return_value = span_context - otel_span.parent = MagicMock() - otel_span.parent.span_id = int("abcdef1234567890", 16) - - parent_context = {} - - baggage = ( - "sentry-trace_id=771a43a4192642f0b136d5159a501700," - "sentry-public_key=49d0f7386ad645858ae85020e393bef3," - "sentry-sample_rate=0.01337,sentry-user_id=Am%C3%A9lie" - ) - - with mock.patch( - "sentry_sdk.integrations.opentelemetry.span_processor.get_value", - side_effect=[ - extract_sentrytrace_data( - "1234567890abcdef1234567890abcdef-1234567890abcdef-1" - ), - baggage, - ], - ): - span_processor = SentrySpanProcessor() - sentry_trace_data = span_processor._get_trace_data(otel_span, parent_context) - assert sentry_trace_data["trace_id"] == "1234567890abcdef1234567890abcdef" - assert sentry_trace_data["span_id"] == "1234567890abcdef" - assert sentry_trace_data["parent_span_id"] == "abcdef1234567890" - assert sentry_trace_data["parent_sampled"] - assert sentry_trace_data["baggage"] == baggage - - -def test_update_span_with_otel_data_http_method(): - sentry_span = Span() - - otel_span = MagicMock() - otel_span.name = "Test OTel Span" - otel_span.kind = SpanKind.CLIENT - otel_span.attributes = { - "http.method": "GET", - "http.status_code": 429, - "http.status_text": "xxx", - "http.user_agent": "curl/7.64.1", - "net.peer.name": "example.com", - "http.target": "/", - } - - span_processor = SentrySpanProcessor() - span_processor._update_span_with_otel_data(sentry_span, otel_span) - - assert sentry_span.op == "http.client" - assert sentry_span.description == "GET example.com /" - assert sentry_span.status == "resource_exhausted" - - assert sentry_span._data["http.method"] == "GET" - assert sentry_span._data["http.response.status_code"] == 429 - assert sentry_span._data["http.status_text"] == "xxx" - assert sentry_span._data["http.user_agent"] == "curl/7.64.1" - assert sentry_span._data["net.peer.name"] == "example.com" - assert sentry_span._data["http.target"] == "/" - - -@pytest.mark.parametrize( - "otel_status, expected_status", - [ - pytest.param(Status(StatusCode.UNSET), None, id="unset"), - pytest.param(Status(StatusCode.OK), "ok", id="ok"), - pytest.param(Status(StatusCode.ERROR), "internal_error", id="error"), - ], -) -def test_update_span_with_otel_status(otel_status, expected_status): - sentry_span = Span() - - otel_span = MagicMock() - otel_span.name = "Test OTel Span" - otel_span.kind = SpanKind.INTERNAL - otel_span.status = otel_status - - span_processor = SentrySpanProcessor() - span_processor._update_span_with_otel_status(sentry_span, otel_span) - - assert sentry_span.get_trace_context().get("status") == expected_status - - -def 
test_update_span_with_otel_data_http_method2(): - sentry_span = Span() - - otel_span = MagicMock() - otel_span.name = "Test OTel Span" - otel_span.kind = SpanKind.SERVER - otel_span.attributes = { - "http.method": "GET", - "http.status_code": 429, - "http.status_text": "xxx", - "http.user_agent": "curl/7.64.1", - "http.url": "https://example.com/status/403?password=123&username=test@example.com&author=User123&auth=1234567890abcdef", - } - - span_processor = SentrySpanProcessor() - span_processor._update_span_with_otel_data(sentry_span, otel_span) - - assert sentry_span.op == "http.server" - assert sentry_span.description == "GET https://example.com/status/403" - assert sentry_span.status == "resource_exhausted" - - assert sentry_span._data["http.method"] == "GET" - assert sentry_span._data["http.response.status_code"] == 429 - assert sentry_span._data["http.status_text"] == "xxx" - assert sentry_span._data["http.user_agent"] == "curl/7.64.1" - assert ( - sentry_span._data["http.url"] - == "https://example.com/status/403?password=123&username=test@example.com&author=User123&auth=1234567890abcdef" - ) - - -def test_update_span_with_otel_data_db_query(): - sentry_span = Span() - - otel_span = MagicMock() - otel_span.name = "Test OTel Span" - otel_span.attributes = { - "db.system": "postgresql", - "db.statement": "SELECT * FROM table where pwd = '123456'", - } - - span_processor = SentrySpanProcessor() - span_processor._update_span_with_otel_data(sentry_span, otel_span) - - assert sentry_span.op == "db" - assert sentry_span.description == "SELECT * FROM table where pwd = '123456'" - - assert sentry_span._data["db.system"] == "postgresql" - assert ( - sentry_span._data["db.statement"] == "SELECT * FROM table where pwd = '123456'" - ) - - -def test_on_start_transaction(): - otel_span = MagicMock() - otel_span.name = "Sample OTel Span" - otel_span.start_time = time.time_ns() - span_context = SpanContext( - trace_id=int("1234567890abcdef1234567890abcdef", 16), - span_id=int("1234567890abcdef", 16), - is_remote=True, - ) - otel_span.get_span_context.return_value = span_context - otel_span.parent = MagicMock() - otel_span.parent.span_id = int("abcdef1234567890", 16) - - parent_context = {} - - fake_start_transaction = MagicMock() - - fake_client = MagicMock() - fake_client.options = {"instrumenter": "otel"} - fake_client.dsn = "https://1234567890abcdef@o123456.ingest.sentry.io/123456" - sentry_sdk.get_global_scope().set_client(fake_client) - - with mock.patch( - "sentry_sdk.integrations.opentelemetry.span_processor.start_transaction", - fake_start_transaction, - ): - span_processor = SentrySpanProcessor() - span_processor.on_start(otel_span, parent_context) - - fake_start_transaction.assert_called_once_with( - name="Sample OTel Span", - span_id="1234567890abcdef", - parent_span_id="abcdef1234567890", - trace_id="1234567890abcdef1234567890abcdef", - baggage=None, - start_timestamp=datetime.fromtimestamp( - otel_span.start_time / 1e9, timezone.utc - ), - instrumenter="otel", - origin="auto.otel", - ) - - assert len(span_processor.otel_span_map.keys()) == 1 - assert list(span_processor.otel_span_map.keys())[0] == "1234567890abcdef" - - -def test_on_start_child(): - otel_span = MagicMock() - otel_span.name = "Sample OTel Span" - otel_span.start_time = time.time_ns() - span_context = SpanContext( - trace_id=int("1234567890abcdef1234567890abcdef", 16), - span_id=int("1234567890abcdef", 16), - is_remote=True, - ) - otel_span.get_span_context.return_value = span_context - otel_span.parent = MagicMock() - 
otel_span.parent.span_id = int("abcdef1234567890", 16) - - parent_context = {} - - fake_client = MagicMock() - fake_client.options = {"instrumenter": "otel"} - fake_client.dsn = "https://1234567890abcdef@o123456.ingest.sentry.io/123456" - sentry_sdk.get_global_scope().set_client(fake_client) - - fake_span = MagicMock() - - span_processor = SentrySpanProcessor() - span_processor.otel_span_map["abcdef1234567890"] = fake_span - span_processor.on_start(otel_span, parent_context) - - fake_span.start_child.assert_called_once_with( - span_id="1234567890abcdef", - name="Sample OTel Span", - start_timestamp=datetime.fromtimestamp( - otel_span.start_time / 1e9, timezone.utc - ), - instrumenter="otel", - origin="auto.otel", - ) - - assert len(span_processor.otel_span_map.keys()) == 2 - assert "abcdef1234567890" in span_processor.otel_span_map.keys() - assert "1234567890abcdef" in span_processor.otel_span_map.keys() - - -def test_on_end_no_sentry_span(): - """ - If on_end is called on a span that is not in the otel_span_map, it should be a no-op. - """ - otel_span = MagicMock() - otel_span.name = "Sample OTel Span" - otel_span.end_time = time.time_ns() - span_context = SpanContext( - trace_id=int("1234567890abcdef1234567890abcdef", 16), - span_id=int("1234567890abcdef", 16), - is_remote=True, - ) - otel_span.get_span_context.return_value = span_context - - span_processor = SentrySpanProcessor() - span_processor.otel_span_map = {} - span_processor._get_otel_context = MagicMock() - span_processor._update_span_with_otel_data = MagicMock() - - span_processor.on_end(otel_span) - - span_processor._get_otel_context.assert_not_called() - span_processor._update_span_with_otel_data.assert_not_called() - - -def test_on_end_sentry_transaction(): - """ - Test on_end for a sentry Transaction. - """ - otel_span = MagicMock() - otel_span.name = "Sample OTel Span" - otel_span.end_time = time.time_ns() - otel_span.status = Status(StatusCode.OK) - span_context = SpanContext( - trace_id=int("1234567890abcdef1234567890abcdef", 16), - span_id=int("1234567890abcdef", 16), - is_remote=True, - ) - otel_span.get_span_context.return_value = span_context - - fake_client = MagicMock() - fake_client.options = {"instrumenter": "otel"} - sentry_sdk.get_global_scope().set_client(fake_client) - - fake_sentry_span = MagicMock(spec=Transaction) - fake_sentry_span.set_context = MagicMock() - fake_sentry_span.finish = MagicMock() - - span_processor = SentrySpanProcessor() - span_processor._get_otel_context = MagicMock() - span_processor._update_span_with_otel_data = MagicMock() - span_processor.otel_span_map["1234567890abcdef"] = fake_sentry_span - - span_processor.on_end(otel_span) - - fake_sentry_span.set_context.assert_called_once() - span_processor._update_span_with_otel_data.assert_not_called() - fake_sentry_span.set_status.assert_called_once_with("ok") - fake_sentry_span.finish.assert_called_once() - - -def test_on_end_sentry_span(): - """ - Test on_end for a sentry Span. 
- """ - otel_span = MagicMock() - otel_span.name = "Sample OTel Span" - otel_span.end_time = time.time_ns() - otel_span.status = Status(StatusCode.OK) - span_context = SpanContext( - trace_id=int("1234567890abcdef1234567890abcdef", 16), - span_id=int("1234567890abcdef", 16), - is_remote=True, - ) - otel_span.get_span_context.return_value = span_context - - fake_client = MagicMock() - fake_client.options = {"instrumenter": "otel"} - sentry_sdk.get_global_scope().set_client(fake_client) - - fake_sentry_span = MagicMock(spec=Span) - fake_sentry_span.set_context = MagicMock() - fake_sentry_span.finish = MagicMock() - - span_processor = SentrySpanProcessor() - span_processor._get_otel_context = MagicMock() - span_processor._update_span_with_otel_data = MagicMock() - span_processor.otel_span_map["1234567890abcdef"] = fake_sentry_span - - span_processor.on_end(otel_span) - - fake_sentry_span.set_context.assert_not_called() - span_processor._update_span_with_otel_data.assert_called_once_with( - fake_sentry_span, otel_span - ) - fake_sentry_span.set_status.assert_called_once_with("ok") - fake_sentry_span.finish.assert_called_once() - - -def test_link_trace_context_to_error_event(): - """ - Test that the trace context is added to the error event. - """ - fake_client = MagicMock() - fake_client.options = {"instrumenter": "otel"} - sentry_sdk.get_global_scope().set_client(fake_client) - - span_id = "1234567890abcdef" - trace_id = "1234567890abcdef1234567890abcdef" - - fake_trace_context = { - "bla": "blub", - "foo": "bar", - "baz": 123, - } - - sentry_span = MagicMock() - sentry_span.get_trace_context = MagicMock(return_value=fake_trace_context) - - otel_span_map = { - span_id: sentry_span, - } - - span_context = SpanContext( - trace_id=int(trace_id, 16), - span_id=int(span_id, 16), - is_remote=True, - ) - otel_span = MagicMock() - otel_span.get_span_context = MagicMock(return_value=span_context) - - fake_event = {"event_id": "1234567890abcdef1234567890abcdef"} - - with mock.patch( - "sentry_sdk.integrations.opentelemetry.span_processor.get_current_span", - return_value=otel_span, - ): - event = link_trace_context_to_error_event(fake_event, otel_span_map) - - assert event - assert event == fake_event # the event is changed in place inside the function - assert "contexts" in event - assert "trace" in event["contexts"] - assert event["contexts"]["trace"] == fake_trace_context - - -def test_pruning_old_spans_on_start(): - otel_span = MagicMock() - otel_span.name = "Sample OTel Span" - otel_span.start_time = time.time_ns() - span_context = SpanContext( - trace_id=int("1234567890abcdef1234567890abcdef", 16), - span_id=int("1234567890abcdef", 16), - is_remote=True, - ) - otel_span.get_span_context.return_value = span_context - otel_span.parent = MagicMock() - otel_span.parent.span_id = int("abcdef1234567890", 16) - - parent_context = {} - fake_client = MagicMock() - fake_client.options = {"instrumenter": "otel", "debug": False} - fake_client.dsn = "https://1234567890abcdef@o123456.ingest.sentry.io/123456" - sentry_sdk.get_global_scope().set_client(fake_client) - - span_processor = SentrySpanProcessor() - - span_processor.otel_span_map = { - "111111111abcdef": MagicMock(), # should stay - "2222222222abcdef": MagicMock(), # should go - "3333333333abcdef": MagicMock(), # should go - } - current_time_minutes = int(time.time() / 60) - span_processor.open_spans = { - current_time_minutes - 3: {"111111111abcdef"}, # should stay - current_time_minutes - - 11: {"2222222222abcdef", "3333333333abcdef"}, # should go - 
} - - span_processor.on_start(otel_span, parent_context) - assert sorted(list(span_processor.otel_span_map.keys())) == [ - "111111111abcdef", - "1234567890abcdef", - ] - assert sorted(list(span_processor.open_spans.values())) == [ - {"111111111abcdef"}, - {"1234567890abcdef"}, - ] - - -def test_pruning_old_spans_on_end(): - otel_span = MagicMock() - otel_span.name = "Sample OTel Span" - otel_span.start_time = time.time_ns() - span_context = SpanContext( - trace_id=int("1234567890abcdef1234567890abcdef", 16), - span_id=int("1234567890abcdef", 16), - is_remote=True, - ) - otel_span.get_span_context.return_value = span_context - otel_span.parent = MagicMock() - otel_span.parent.span_id = int("abcdef1234567890", 16) - - fake_client = MagicMock() - fake_client.options = {"instrumenter": "otel"} - sentry_sdk.get_global_scope().set_client(fake_client) - - fake_sentry_span = MagicMock(spec=Span) - fake_sentry_span.set_context = MagicMock() - fake_sentry_span.finish = MagicMock() - - span_processor = SentrySpanProcessor() - span_processor._get_otel_context = MagicMock() - span_processor._update_span_with_otel_data = MagicMock() - - span_processor.otel_span_map = { - "111111111abcdef": MagicMock(), # should stay - "2222222222abcdef": MagicMock(), # should go - "3333333333abcdef": MagicMock(), # should go - "1234567890abcdef": fake_sentry_span, # should go (because it is closed) - } - current_time_minutes = int(time.time() / 60) - span_processor.open_spans = { - current_time_minutes: {"1234567890abcdef"}, # should go (because it is closed) - current_time_minutes - 3: {"111111111abcdef"}, # should stay - current_time_minutes - - 11: {"2222222222abcdef", "3333333333abcdef"}, # should go - } - - span_processor.on_end(otel_span) - assert sorted(list(span_processor.otel_span_map.keys())) == ["111111111abcdef"] - assert sorted(list(span_processor.open_spans.values())) == [{"111111111abcdef"}] diff --git a/tests/integrations/pymongo/test_pymongo.py b/tests/integrations/pymongo/test_pymongo.py index 10f1c9fba9..e5751854b7 100644 --- a/tests/integrations/pymongo/test_pymongo.py +++ b/tests/integrations/pymongo/test_pymongo.py @@ -1,4 +1,6 @@ -from sentry_sdk import capture_message, start_transaction +import re + +import sentry_sdk from sentry_sdk.consts import SPANDATA from sentry_sdk.integrations.pymongo import PyMongoIntegration, _strip_pii @@ -35,7 +37,7 @@ def test_transactions(sentry_init, capture_events, mongo_server, with_pii): connection = MongoClient(mongo_server.uri) - with start_transaction(): + with sentry_sdk.start_span(): list( connection["test_db"]["test_collection"].find({"foobar": 1}) ) # force query execution @@ -49,7 +51,7 @@ def test_transactions(sentry_init, capture_events, mongo_server, with_pii): (event,) = events (find, insert_success, insert_fail) = event["spans"] - common_tags = { + common_data = { "db.name": "test_db", "db.system": "mongodb", "net.peer.name": mongo_server.host, @@ -60,8 +62,7 @@ def test_transactions(sentry_init, capture_events, mongo_server, with_pii): assert span["data"][SPANDATA.DB_NAME] == "test_db" assert span["data"][SPANDATA.SERVER_ADDRESS] == "localhost" assert span["data"][SPANDATA.SERVER_PORT] == mongo_server.port - for field, value in common_tags.items(): - assert span["tags"][field] == value + for field, value in common_data.items(): assert span["data"][field] == value assert find["op"] == "db" @@ -69,22 +70,16 @@ def test_transactions(sentry_init, capture_events, mongo_server, with_pii): assert insert_fail["op"] == "db" assert 
find["data"]["db.operation"] == "find" - assert find["tags"]["db.operation"] == "find" assert insert_success["data"]["db.operation"] == "insert" - assert insert_success["tags"]["db.operation"] == "insert" assert insert_fail["data"]["db.operation"] == "insert" - assert insert_fail["tags"]["db.operation"] == "insert" assert find["description"].startswith('{"find') - assert insert_success["description"].startswith('{"insert') - assert insert_fail["description"].startswith('{"insert') + assert re.match("^{['\"]insert.*", insert_success["description"]) + assert re.match("^{['\"]insert.*", insert_fail["description"]) assert find["data"][SPANDATA.DB_MONGODB_COLLECTION] == "test_collection" - assert find["tags"][SPANDATA.DB_MONGODB_COLLECTION] == "test_collection" assert insert_success["data"][SPANDATA.DB_MONGODB_COLLECTION] == "test_collection" - assert insert_success["tags"][SPANDATA.DB_MONGODB_COLLECTION] == "test_collection" assert insert_fail["data"][SPANDATA.DB_MONGODB_COLLECTION] == "erroneous" - assert insert_fail["tags"][SPANDATA.DB_MONGODB_COLLECTION] == "erroneous" if with_pii: assert "1" in find["description"] assert "2" in insert_success["description"] @@ -99,16 +94,22 @@ def test_transactions(sentry_init, capture_events, mongo_server, with_pii): and "4" not in insert_fail["description"] ) - assert find["tags"]["status"] == "ok" - assert insert_success["tags"]["status"] == "ok" - assert insert_fail["tags"]["status"] == "internal_error" - -@pytest.mark.parametrize("with_pii", [False, True]) -def test_breadcrumbs(sentry_init, capture_events, mongo_server, with_pii): +@pytest.mark.parametrize( + "with_pii,traces_sample_rate", + [ + [False, 0.0], + [False, 1.0], + [True, 0.0], + [True, 1.0], + ], +) +def test_breadcrumbs( + sentry_init, capture_events, mongo_server, with_pii, traces_sample_rate +): sentry_init( integrations=[PyMongoIntegration()], - traces_sample_rate=1.0, + traces_sample_rate=traces_sample_rate, send_default_pii=with_pii, ) events = capture_events() @@ -118,7 +119,7 @@ def test_breadcrumbs(sentry_init, capture_events, mongo_server, with_pii): list( connection["test_db"]["test_collection"].find({"foobar": 1}) ) # force query execution - capture_message("hi") + sentry_sdk.capture_message("hi") (event,) = events (crumb,) = event["breadcrumbs"]["values"] @@ -445,7 +446,7 @@ def test_span_origin(sentry_init, capture_events, mongo_server): connection = MongoClient(mongo_server.uri) - with start_transaction(): + with sentry_sdk.start_span(): list( connection["test_db"]["test_collection"].find({"foobar": 1}) ) # force query execution diff --git a/tests/integrations/ray/test_ray.py b/tests/integrations/ray/test_ray.py index 95ab4ad0fa..a8c752269a 100644 --- a/tests/integrations/ray/test_ray.py +++ b/tests/integrations/ray/test_ray.py @@ -77,42 +77,42 @@ def example_task(): return sentry_sdk.get_client().transport.envelopes - with sentry_sdk.start_transaction(op="task", name="ray test transaction"): + with sentry_sdk.start_span(op="test", name="ray client root span"): worker_envelopes = ray.get(example_task.remote()) client_envelope = sentry_sdk.get_client().transport.envelopes[0] - client_transaction = client_envelope.get_transaction_event() - assert client_transaction["transaction"] == "ray test transaction" - assert client_transaction["transaction_info"] == {"source": "custom"} + client_root_span = client_envelope.get_transaction_event() + assert client_root_span["transaction"] == "ray client root span" + assert client_root_span["transaction_info"] == {"source": "custom"} 
worker_envelope = worker_envelopes[0] - worker_transaction = worker_envelope.get_transaction_event() + worker_root_span = worker_envelope.get_transaction_event() assert ( - worker_transaction["transaction"] + worker_root_span["transaction"] == "tests.integrations.ray.test_ray.test_tracing_in_ray_tasks..example_task" ) - assert worker_transaction["transaction_info"] == {"source": "task"} + assert worker_root_span["transaction_info"] == {"source": "task"} - (span,) = client_transaction["spans"] + (span,) = client_root_span["spans"] assert span["op"] == "queue.submit.ray" assert span["origin"] == "auto.queue.ray" assert ( span["description"] == "tests.integrations.ray.test_ray.test_tracing_in_ray_tasks..example_task" ) - assert span["parent_span_id"] == client_transaction["contexts"]["trace"]["span_id"] - assert span["trace_id"] == client_transaction["contexts"]["trace"]["trace_id"] + assert span["parent_span_id"] == client_root_span["contexts"]["trace"]["span_id"] + assert span["trace_id"] == client_root_span["contexts"]["trace"]["trace_id"] - (span,) = worker_transaction["spans"] + (span,) = worker_root_span["spans"] assert span["op"] == "task" assert span["origin"] == "manual" assert span["description"] == "example task step" - assert span["parent_span_id"] == worker_transaction["contexts"]["trace"]["span_id"] - assert span["trace_id"] == worker_transaction["contexts"]["trace"]["trace_id"] + assert span["parent_span_id"] == worker_root_span["contexts"]["trace"]["span_id"] + assert span["trace_id"] == worker_root_span["contexts"]["trace"]["trace_id"] assert ( - client_transaction["contexts"]["trace"]["trace_id"] - == worker_transaction["contexts"]["trace"]["trace_id"] + client_root_span["contexts"]["trace"]["trace_id"] + == worker_root_span["contexts"]["trace"]["trace_id"] ) @@ -132,7 +132,7 @@ def test_errors_in_ray_tasks(): def example_task(): 1 / 0 - with sentry_sdk.start_transaction(op="task", name="ray test transaction"): + with sentry_sdk.start_span(op="test", name="ray client root span"): with pytest.raises(ZeroDivisionError): future = example_task.remote() ray.get(future) @@ -167,22 +167,24 @@ def __init__(self): self.n = 0 def increment(self): - with sentry_sdk.start_span(op="task", name="example actor execution"): + with sentry_sdk.start_span( + op="test", name="custom span in actor execution", only_if_parent=True + ): self.n += 1 return sentry_sdk.get_client().transport.envelopes - with sentry_sdk.start_transaction(op="task", name="ray test transaction"): + with sentry_sdk.start_span(op="test", name="ray client root span"): counter = Counter.remote() worker_envelopes = ray.get(counter.increment.remote()) client_envelope = sentry_sdk.get_client().transport.envelopes[0] - client_transaction = client_envelope.get_transaction_event() + client_root_span = client_envelope.get_transaction_event() # Spans for submitting the actor task are not created (actors are not supported yet) - assert client_transaction["spans"] == [] + assert client_root_span["spans"] == [] - # Transaction are not yet created when executing ray actors (actors are not supported yet) + # Root spans are not yet automatically created when executing ray actors (actors are not supported yet) assert worker_envelopes == [] @@ -204,12 +206,14 @@ def __init__(self): self.n = 0 def increment(self): - with sentry_sdk.start_span(op="task", name="example actor execution"): + with sentry_sdk.start_span( + op="test", name="custom span in actor execution", only_if_parent=True + ): 1 / 0 return 
sentry_sdk.get_client().transport.envelopes - with sentry_sdk.start_transaction(op="task", name="ray test transaction"): + with sentry_sdk.start_span(op="test", name="ray client root span"): with pytest.raises(ZeroDivisionError): counter = Counter.remote() future = counter.increment.remote() diff --git a/tests/integrations/redis/asyncio/test_redis_asyncio.py b/tests/integrations/redis/asyncio/test_redis_asyncio.py index 17130b337b..e735d478c9 100644 --- a/tests/integrations/redis/asyncio/test_redis_asyncio.py +++ b/tests/integrations/redis/asyncio/test_redis_asyncio.py @@ -1,6 +1,6 @@ import pytest -from sentry_sdk import capture_message, start_transaction +import sentry_sdk from sentry_sdk.consts import SPANDATA from sentry_sdk.integrations.redis import RedisIntegration from tests.conftest import ApproxDict @@ -16,7 +16,7 @@ async def test_async_basic(sentry_init, capture_events): connection = FakeRedis() await connection.get("foobar") - capture_message("hi") + sentry_sdk.capture_message("hi") (event,) = events (crumb,) = event["breadcrumbs"]["values"] @@ -54,7 +54,7 @@ async def test_async_redis_pipeline( events = capture_events() connection = FakeRedis() - with start_transaction(): + with sentry_sdk.start_span(): pipeline = connection.pipeline(transaction=is_transaction) pipeline.get("foo") pipeline.set("bar", 1) @@ -65,12 +65,10 @@ async def test_async_redis_pipeline( (span,) = event["spans"] assert span["op"] == "db.redis" assert span["description"] == "redis.pipeline.execute" + assert span["data"]["redis.commands.count"] == 3 + assert span["data"]["redis.commands.first_ten"] == expected_first_ten assert span["data"] == ApproxDict( { - "redis.commands": { - "count": 3, - "first_ten": expected_first_ten, - }, SPANDATA.DB_SYSTEM: "redis", SPANDATA.DB_NAME: "0", SPANDATA.SERVER_ADDRESS: connection.connection_pool.connection_kwargs.get( @@ -94,7 +92,7 @@ async def test_async_span_origin(sentry_init, capture_events): events = capture_events() connection = FakeRedis() - with start_transaction(name="custom_transaction"): + with sentry_sdk.start_span(name="custom_transaction"): # default case await connection.set("somekey", "somevalue") diff --git a/tests/integrations/redis/cluster/test_redis_cluster.py b/tests/integrations/redis/cluster/test_redis_cluster.py index 83d1b45cc9..43bd3e3392 100644 --- a/tests/integrations/redis/cluster/test_redis_cluster.py +++ b/tests/integrations/redis/cluster/test_redis_cluster.py @@ -1,7 +1,7 @@ import pytest -from sentry_sdk import capture_message + +import sentry_sdk from sentry_sdk.consts import SPANDATA -from sentry_sdk.api import start_transaction from sentry_sdk.integrations.redis import RedisIntegration from tests.conftest import ApproxDict @@ -27,7 +27,7 @@ def test_rediscluster_breadcrumb(sentry_init, capture_events): rc = redis.RedisCluster(host="localhost", port=6379) rc.get("foobar") - capture_message("hi") + sentry_sdk.capture_message("hi") (event,) = events crumbs = event["breadcrumbs"]["values"] @@ -68,7 +68,7 @@ def test_rediscluster_basic(sentry_init, capture_events, send_default_pii, descr ) events = capture_events() - with start_transaction(): + with sentry_sdk.start_span(): rc = redis.RedisCluster(host="localhost", port=6379) rc.set("bar", 1) @@ -117,7 +117,7 @@ def test_rediscluster_pipeline( events = capture_events() rc = redis.RedisCluster(host="localhost", port=6379) - with start_transaction(): + with sentry_sdk.start_span(): pipeline = rc.pipeline() pipeline.get("foo") pipeline.set("bar", 1) @@ -128,12 +128,10 @@ def 
test_rediscluster_pipeline( (span,) = event["spans"] assert span["op"] == "db.redis" assert span["description"] == "redis.pipeline.execute" + assert span["data"]["redis.commands.count"] == 3 + assert span["data"]["redis.commands.first_ten"] == expected_first_ten assert span["data"] == ApproxDict( { - "redis.commands": { - "count": 3, - "first_ten": expected_first_ten, - }, SPANDATA.DB_SYSTEM: "redis", # ClusterNode converts localhost to 127.0.0.1 SPANDATA.SERVER_ADDRESS: "127.0.0.1", @@ -154,7 +152,7 @@ def test_rediscluster_span_origin(sentry_init, capture_events): events = capture_events() rc = redis.RedisCluster(host="localhost", port=6379) - with start_transaction(name="custom_transaction"): + with sentry_sdk.start_span(name="custom_transaction"): # default case rc.set("somekey", "somevalue") diff --git a/tests/integrations/redis/cluster_asyncio/test_redis_cluster_asyncio.py b/tests/integrations/redis/cluster_asyncio/test_redis_cluster_asyncio.py index 993a2962ca..85970978dd 100644 --- a/tests/integrations/redis/cluster_asyncio/test_redis_cluster_asyncio.py +++ b/tests/integrations/redis/cluster_asyncio/test_redis_cluster_asyncio.py @@ -1,6 +1,6 @@ import pytest -from sentry_sdk import capture_message, start_transaction +import sentry_sdk from sentry_sdk.consts import SPANDATA from sentry_sdk.integrations.redis import RedisIntegration from tests.conftest import ApproxDict @@ -40,7 +40,7 @@ async def test_async_breadcrumb(sentry_init, capture_events): connection = cluster.RedisCluster(host="localhost", port=6379) await connection.get("foobar") - capture_message("hi") + sentry_sdk.capture_message("hi") (event,) = events (crumb,) = event["breadcrumbs"]["values"] @@ -78,7 +78,7 @@ async def test_async_basic(sentry_init, capture_events, send_default_pii, descri events = capture_events() connection = cluster.RedisCluster(host="localhost", port=6379) - with start_transaction(): + with sentry_sdk.start_span(): await connection.set("bar", 1) (event,) = events @@ -120,7 +120,7 @@ async def test_async_redis_pipeline( events = capture_events() connection = cluster.RedisCluster(host="localhost", port=6379) - with start_transaction(): + with sentry_sdk.start_span(): pipeline = connection.pipeline() pipeline.get("foo") pipeline.set("bar", 1) @@ -131,12 +131,10 @@ async def test_async_redis_pipeline( (span,) = event["spans"] assert span["op"] == "db.redis" assert span["description"] == "redis.pipeline.execute" + assert span["data"]["redis.commands.count"] == 3 + assert span["data"]["redis.commands.first_ten"] == expected_first_ten assert span["data"] == ApproxDict( { - "redis.commands": { - "count": 3, - "first_ten": expected_first_ten, - }, SPANDATA.DB_SYSTEM: "redis", # ClusterNode converts localhost to 127.0.0.1 SPANDATA.SERVER_ADDRESS: "127.0.0.1", @@ -158,7 +156,7 @@ async def test_async_span_origin(sentry_init, capture_events): events = capture_events() connection = cluster.RedisCluster(host="localhost", port=6379) - with start_transaction(name="custom_transaction"): + with sentry_sdk.start_span(name="custom_transaction"): # default case await connection.set("somekey", "somevalue") diff --git a/tests/integrations/redis/test_redis.py b/tests/integrations/redis/test_redis.py index 5173885f33..4afee93c59 100644 --- a/tests/integrations/redis/test_redis.py +++ b/tests/integrations/redis/test_redis.py @@ -3,7 +3,7 @@ import pytest from fakeredis import FakeStrictRedis -from sentry_sdk import capture_message, start_transaction +import sentry_sdk from sentry_sdk.consts import SPANDATA from 
sentry_sdk.integrations.redis import RedisIntegration @@ -23,7 +23,7 @@ def test_basic(sentry_init, capture_events): connection = FakeStrictRedis() connection.get("foobar") - capture_message("hi") + sentry_sdk.capture_message("hi") (event,) = events (crumb,) = event["breadcrumbs"]["values"] @@ -60,7 +60,7 @@ def test_redis_pipeline( events = capture_events() connection = FakeStrictRedis() - with start_transaction(): + with sentry_sdk.start_span(): pipeline = connection.pipeline(transaction=is_transaction) pipeline.get("foo") pipeline.set("bar", 1) @@ -72,17 +72,15 @@ def test_redis_pipeline( assert span["op"] == "db.redis" assert span["description"] == "redis.pipeline.execute" assert span["data"][SPANDATA.DB_SYSTEM] == "redis" - assert span["data"]["redis.commands"] == { - "count": 3, - "first_ten": expected_first_ten, - } + assert span["data"]["redis.commands.count"] == 3 + assert span["data"]["redis.commands.first_ten"] == expected_first_ten assert span["tags"] == { "redis.transaction": is_transaction, "redis.is_cluster": False, } -def test_sensitive_data(sentry_init, capture_events): +def test_sensitive_data(sentry_init, capture_events, render_span_tree): # fakeredis does not support the AUTH command, so we need to mock it with mock.patch( "sentry_sdk.integrations.redis.utils._COMMANDS_INCLUDING_SENSITIVE_DATA", @@ -96,18 +94,22 @@ def test_sensitive_data(sentry_init, capture_events): events = capture_events() connection = FakeStrictRedis() - with start_transaction(): + with sentry_sdk.start_span(): connection.get( "this is super secret" ) # because fakeredis does not support AUTH we use GET instead (event,) = events - spans = event["spans"] - assert spans[0]["op"] == "db.redis" - assert spans[0]["description"] == "GET [Filtered]" + assert ( + render_span_tree(event) + == """\ +- op="": description=null + - op="db.redis": description="GET [Filtered]"\ +""" + ) -def test_pii_data_redacted(sentry_init, capture_events): +def test_pii_data_redacted(sentry_init, capture_events, render_span_tree): sentry_init( integrations=[RedisIntegration()], traces_sample_rate=1.0, @@ -115,22 +117,26 @@ def test_pii_data_redacted(sentry_init, capture_events): events = capture_events() connection = FakeStrictRedis() - with start_transaction(): + with sentry_sdk.start_span(): connection.set("somekey1", "my secret string1") connection.set("somekey2", "my secret string2") connection.get("somekey2") connection.delete("somekey1", "somekey2") (event,) = events - spans = event["spans"] - assert spans[0]["op"] == "db.redis" - assert spans[0]["description"] == "SET 'somekey1' [Filtered]" - assert spans[1]["description"] == "SET 'somekey2' [Filtered]" - assert spans[2]["description"] == "GET 'somekey2'" - assert spans[3]["description"] == "DEL 'somekey1' [Filtered]" + assert ( + render_span_tree(event) + == """\ +- op="": description=null + - op="db.redis": description="SET 'somekey1' [Filtered]" + - op="db.redis": description="SET 'somekey2' [Filtered]" + - op="db.redis": description="GET 'somekey2'" + - op="db.redis": description="DEL 'somekey1' [Filtered]"\ +""" + ) -def test_pii_data_sent(sentry_init, capture_events): +def test_pii_data_sent(sentry_init, capture_events, render_span_tree): sentry_init( integrations=[RedisIntegration()], traces_sample_rate=1.0, @@ -139,22 +145,26 @@ def test_pii_data_sent(sentry_init, capture_events): events = capture_events() connection = FakeStrictRedis() - with start_transaction(): + with sentry_sdk.start_span(): connection.set("somekey1", "my secret string1") 
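# NOTE (editorial aside, not part of the patch): `render_span_tree` is a
# pytest fixture provided by tests/conftest.py; its implementation is not
# shown in this diff. Judging from the strings asserted throughout these
# redis tests, it renders an event's span tree as indented
# `op=...: description=...` lines. A rough two-level approximation is
# sketched below -- the real fixture presumably nests spans arbitrarily deep
# via parent_span_id, so treat this only as an illustration of the expected
# output shape:

def render_span_tree(event):
    trace = event["contexts"]["trace"]
    # root line: the transaction itself, rendered with a null description
    lines = ['- op="%s": description=null' % trace.get("op", "")]
    for span in sorted(event["spans"], key=lambda s: s["start_timestamp"]):
        lines.append(
            '  - op="%s": description="%s"' % (span["op"], span["description"])
        )
    return "\n".join(lines)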
connection.set("somekey2", "my secret string2") connection.get("somekey2") connection.delete("somekey1", "somekey2") (event,) = events - spans = event["spans"] - assert spans[0]["op"] == "db.redis" - assert spans[0]["description"] == "SET 'somekey1' 'my secret string1'" - assert spans[1]["description"] == "SET 'somekey2' 'my secret string2'" - assert spans[2]["description"] == "GET 'somekey2'" - assert spans[3]["description"] == "DEL 'somekey1' 'somekey2'" + assert ( + render_span_tree(event) + == """\ +- op="": description=null + - op="db.redis": description="SET 'somekey1' 'my secret string1'" + - op="db.redis": description="SET 'somekey2' 'my secret string2'" + - op="db.redis": description="GET 'somekey2'" + - op="db.redis": description="DEL 'somekey1' 'somekey2'"\ +""" + ) -def test_data_truncation(sentry_init, capture_events): +def test_data_truncation(sentry_init, capture_events, render_span_tree): sentry_init( integrations=[RedisIntegration()], traces_sample_rate=1.0, @@ -163,22 +173,24 @@ def test_data_truncation(sentry_init, capture_events): events = capture_events() connection = FakeStrictRedis() - with start_transaction(): + with sentry_sdk.start_span(): long_string = "a" * 100000 connection.set("somekey1", long_string) short_string = "b" * 10 connection.set("somekey2", short_string) (event,) = events - spans = event["spans"] - assert spans[0]["op"] == "db.redis" - assert spans[0]["description"] == "SET 'somekey1' '%s..." % ( - long_string[: 1024 - len("...") - len("SET 'somekey1' '")], + assert ( + render_span_tree(event) + == f"""\ +- op="": description=null + - op="db.redis": description="SET 'somekey1' '{long_string[: 1024 - len("...") - len("SET 'somekey1' '")]}..." + - op="db.redis": description="SET 'somekey2' 'bbbbbbbbbb'"\ +""" # noqa: E221 ) - assert spans[1]["description"] == "SET 'somekey2' '%s'" % (short_string,) -def test_data_truncation_custom(sentry_init, capture_events): +def test_data_truncation_custom(sentry_init, capture_events, render_span_tree): sentry_init( integrations=[RedisIntegration(max_data_size=30)], traces_sample_rate=1.0, @@ -187,19 +199,21 @@ def test_data_truncation_custom(sentry_init, capture_events): events = capture_events() connection = FakeStrictRedis() - with start_transaction(): + with sentry_sdk.start_span(): long_string = "a" * 100000 connection.set("somekey1", long_string) short_string = "b" * 10 connection.set("somekey2", short_string) (event,) = events - spans = event["spans"] - assert spans[0]["op"] == "db.redis" - assert spans[0]["description"] == "SET 'somekey1' '%s..." % ( - long_string[: 30 - len("...") - len("SET 'somekey1' '")], + assert ( + render_span_tree(event) + == f"""\ +- op="": description=null + - op="db.redis": description="SET 'somekey1' '{long_string[: 30 - len("...") - len("SET 'somekey1' '")]}..." 
+ - op="db.redis": description="SET 'somekey2' '{short_string}'"\ +""" # noqa: E221 ) - assert spans[1]["description"] == "SET 'somekey2' '%s'" % (short_string,) def test_breadcrumbs(sentry_init, capture_events): @@ -216,7 +230,7 @@ def test_breadcrumbs(sentry_init, capture_events): short_string = "b" * 10 connection.set("somekey2", short_string) - capture_message("hi") + sentry_sdk.capture_message("hi") (event,) = events crumbs = event["breadcrumbs"]["values"] @@ -254,7 +268,7 @@ def test_db_connection_attributes_client(sentry_init, capture_events): ) events = capture_events() - with start_transaction(): + with sentry_sdk.start_span(): connection = FakeStrictRedis(connection_pool=MOCK_CONNECTION_POOL) connection.get("foobar") @@ -276,7 +290,7 @@ def test_db_connection_attributes_pipeline(sentry_init, capture_events): ) events = capture_events() - with start_transaction(): + with sentry_sdk.start_span(): connection = FakeStrictRedis(connection_pool=MOCK_CONNECTION_POOL) pipeline = connection.pipeline(transaction=False) pipeline.get("foo") @@ -303,7 +317,7 @@ def test_span_origin(sentry_init, capture_events): events = capture_events() connection = FakeStrictRedis() - with start_transaction(name="custom_transaction"): + with sentry_sdk.start_span(name="custom_transaction"): # default case connection.set("somekey", "somevalue") diff --git a/tests/integrations/redis/test_redis_cache_module.py b/tests/integrations/redis/test_redis_cache_module.py index f118aa53f5..75f58d346d 100644 --- a/tests/integrations/redis/test_redis_cache_module.py +++ b/tests/integrations/redis/test_redis_cache_module.py @@ -14,7 +14,7 @@ FAKEREDIS_VERSION = parse_version(fakeredis.__version__) -def test_no_cache_basic(sentry_init, capture_events): +def test_no_cache_basic(sentry_init, capture_events, render_span_tree): sentry_init( integrations=[ RedisIntegration(), @@ -24,16 +24,20 @@ def test_no_cache_basic(sentry_init, capture_events): events = capture_events() connection = FakeStrictRedis() - with sentry_sdk.start_transaction(): + with sentry_sdk.start_span(name="cache"): connection.get("mycachekey") (event,) = events - spans = event["spans"] - assert len(spans) == 1 - assert spans[0]["op"] == "db.redis" + assert ( + render_span_tree(event) + == """\ +- op="cache": description=null + - op="db.redis": description="GET 'mycachekey'"\ +""" + ) -def test_cache_basic(sentry_init, capture_events): +def test_cache_basic(sentry_init, capture_events, render_span_tree): sentry_init( integrations=[ RedisIntegration( @@ -45,7 +49,7 @@ def test_cache_basic(sentry_init, capture_events): events = capture_events() connection = FakeStrictRedis() - with sentry_sdk.start_transaction(): + with sentry_sdk.start_span(name="cache"): connection.hget("mycachekey", "myfield") connection.get("mycachekey") connection.set("mycachekey1", "bla") @@ -53,31 +57,25 @@ def test_cache_basic(sentry_init, capture_events): connection.mget("mycachekey1", "mycachekey2") (event,) = events - spans = event["spans"] - assert len(spans) == 9 - - # no cache support for hget command - assert spans[0]["op"] == "db.redis" - assert spans[0]["tags"]["redis.command"] == "HGET" - - assert spans[1]["op"] == "cache.get" - assert spans[2]["op"] == "db.redis" - assert spans[2]["tags"]["redis.command"] == "GET" - - assert spans[3]["op"] == "cache.put" - assert spans[4]["op"] == "db.redis" - assert spans[4]["tags"]["redis.command"] == "SET" - - assert spans[5]["op"] == "cache.put" - assert spans[6]["op"] == "db.redis" - assert spans[6]["tags"]["redis.command"] == "SETEX" - 
- assert spans[7]["op"] == "cache.get" - assert spans[8]["op"] == "db.redis" - assert spans[8]["tags"]["redis.command"] == "MGET" + # no cache support for HGET command + assert ( + render_span_tree(event) + == """\ +- op="cache": description=null + - op="db.redis": description="HGET 'mycachekey' [Filtered]" + - op="cache.get": description="mycachekey" + - op="db.redis": description="GET 'mycachekey'" + - op="cache.put": description="mycachekey1" + - op="db.redis": description="SET 'mycachekey1' [Filtered]" + - op="cache.put": description="mycachekey2" + - op="db.redis": description="SETEX 'mycachekey2' [Filtered] [Filtered]" + - op="cache.get": description="mycachekey1, mycachekey2" + - op="db.redis": description="MGET 'mycachekey1' [Filtered]"\ +""" + ) -def test_cache_keys(sentry_init, capture_events): +def test_cache_keys(sentry_init, capture_events, render_span_tree): sentry_init( integrations=[ RedisIntegration( @@ -89,30 +87,25 @@ def test_cache_keys(sentry_init, capture_events): events = capture_events() connection = FakeStrictRedis() - with sentry_sdk.start_transaction(): + with sentry_sdk.start_span(name="cache"): connection.get("somethingelse") connection.get("blub") connection.get("blubkeything") connection.get("bl") (event,) = events - spans = event["spans"] - assert len(spans) == 6 - assert spans[0]["op"] == "db.redis" - assert spans[0]["description"] == "GET 'somethingelse'" - - assert spans[1]["op"] == "cache.get" - assert spans[1]["description"] == "blub" - assert spans[2]["op"] == "db.redis" - assert spans[2]["description"] == "GET 'blub'" - - assert spans[3]["op"] == "cache.get" - assert spans[3]["description"] == "blubkeything" - assert spans[4]["op"] == "db.redis" - assert spans[4]["description"] == "GET 'blubkeything'" - - assert spans[5]["op"] == "db.redis" - assert spans[5]["description"] == "GET 'bl'" + assert ( + render_span_tree(event) + == """\ +- op="cache": description=null + - op="db.redis": description="GET 'somethingelse'" + - op="cache.get": description="blub" + - op="db.redis": description="GET 'blub'" + - op="cache.get": description="blubkeything" + - op="db.redis": description="GET 'blubkeything'" + - op="db.redis": description="GET 'bl'"\ +""" + ) def test_cache_data(sentry_init, capture_events): @@ -127,13 +120,13 @@ def test_cache_data(sentry_init, capture_events): events = capture_events() connection = FakeStrictRedis(host="mycacheserver.io", port=6378) - with sentry_sdk.start_transaction(): + with sentry_sdk.start_span(name="cache"): connection.get("mycachekey") connection.set("mycachekey", "事实胜于雄辩") connection.get("mycachekey") (event,) = events - spans = event["spans"] + spans = sorted(event["spans"], key=lambda x: x["start_timestamp"]) assert len(spans) == 6 @@ -210,7 +203,7 @@ def test_cache_prefixes(sentry_init, capture_events): events = capture_events() connection = FakeStrictRedis() - with sentry_sdk.start_transaction(): + with sentry_sdk.start_span(name="cache"): connection.mget("yes", "no") connection.mget("no", 1, "yes") connection.mget("no", "yes.1", "yes.2") @@ -222,7 +215,7 @@ def test_cache_prefixes(sentry_init, capture_events): (event,) = events - spans = event["spans"] + spans = sorted(event["spans"], key=lambda x: x["start_timestamp"]) assert len(spans) == 13 # 8 db spans + 5 cache spans cache_spans = [span for span in spans if span["op"] == "cache.get"] diff --git a/tests/integrations/redis/test_redis_cache_module_async.py b/tests/integrations/redis/test_redis_cache_module_async.py index d607f92fbd..d4ce4936bb 100644 --- 
a/tests/integrations/redis/test_redis_cache_module_async.py +++ b/tests/integrations/redis/test_redis_cache_module_async.py @@ -21,7 +21,7 @@ @pytest.mark.asyncio -async def test_no_cache_basic(sentry_init, capture_events): +async def test_no_cache_basic(sentry_init, capture_events, render_span_tree): sentry_init( integrations=[ RedisIntegration(), @@ -31,17 +31,21 @@ async def test_no_cache_basic(sentry_init, capture_events): events = capture_events() connection = FakeRedisAsync() - with sentry_sdk.start_transaction(): + with sentry_sdk.start_span(): await connection.get("myasynccachekey") (event,) = events - spans = event["spans"] - assert len(spans) == 1 - assert spans[0]["op"] == "db.redis" + assert ( + render_span_tree(event) + == """\ +- op="": description=null + - op="db.redis": description="GET 'myasynccachekey'"\ +""" + ) @pytest.mark.asyncio -async def test_cache_basic(sentry_init, capture_events): +async def test_cache_basic(sentry_init, capture_events, render_span_tree): sentry_init( integrations=[ RedisIntegration( @@ -53,19 +57,22 @@ async def test_cache_basic(sentry_init, capture_events): events = capture_events() connection = FakeRedisAsync() - with sentry_sdk.start_transaction(): + with sentry_sdk.start_span(): await connection.get("myasynccachekey") (event,) = events - spans = event["spans"] - assert len(spans) == 2 - - assert spans[0]["op"] == "cache.get" - assert spans[1]["op"] == "db.redis" + assert ( + render_span_tree(event) + == """\ +- op="": description=null + - op="cache.get": description="myasynccachekey" + - op="db.redis": description="GET 'myasynccachekey'"\ +""" + ) @pytest.mark.asyncio -async def test_cache_keys(sentry_init, capture_events): +async def test_cache_keys(sentry_init, capture_events, render_span_tree): sentry_init( integrations=[ RedisIntegration( @@ -77,30 +84,25 @@ async def test_cache_keys(sentry_init, capture_events): events = capture_events() connection = FakeRedisAsync() - with sentry_sdk.start_transaction(): + with sentry_sdk.start_span(): await connection.get("asomethingelse") await connection.get("ablub") await connection.get("ablubkeything") await connection.get("abl") (event,) = events - spans = event["spans"] - assert len(spans) == 6 - assert spans[0]["op"] == "db.redis" - assert spans[0]["description"] == "GET 'asomethingelse'" - - assert spans[1]["op"] == "cache.get" - assert spans[1]["description"] == "ablub" - assert spans[2]["op"] == "db.redis" - assert spans[2]["description"] == "GET 'ablub'" - - assert spans[3]["op"] == "cache.get" - assert spans[3]["description"] == "ablubkeything" - assert spans[4]["op"] == "db.redis" - assert spans[4]["description"] == "GET 'ablubkeything'" - - assert spans[5]["op"] == "db.redis" - assert spans[5]["description"] == "GET 'abl'" + assert ( + render_span_tree(event) + == """\ +- op="": description=null + - op="db.redis": description="GET 'asomethingelse'" + - op="cache.get": description="ablub" + - op="db.redis": description="GET 'ablub'" + - op="cache.get": description="ablubkeything" + - op="db.redis": description="GET 'ablubkeything'" + - op="db.redis": description="GET 'abl'"\ +""" + ) @pytest.mark.asyncio @@ -116,13 +118,13 @@ async def test_cache_data(sentry_init, capture_events): events = capture_events() connection = FakeRedisAsync(host="mycacheserver.io", port=6378) - with sentry_sdk.start_transaction(): + with sentry_sdk.start_span(): await connection.get("myasynccachekey") await connection.set("myasynccachekey", "事实胜于雄辩") await connection.get("myasynccachekey") (event,) = events - 
spans = event["spans"] + spans = sorted(event["spans"], key=lambda x: x["start_timestamp"]) assert len(spans) == 6 diff --git a/tests/integrations/redis_py_cluster_legacy/test_redis_py_cluster_legacy.py b/tests/integrations/redis_py_cluster_legacy/test_redis_py_cluster_legacy.py index 36a27d569d..a530fec115 100644 --- a/tests/integrations/redis_py_cluster_legacy/test_redis_py_cluster_legacy.py +++ b/tests/integrations/redis_py_cluster_legacy/test_redis_py_cluster_legacy.py @@ -4,7 +4,7 @@ import rediscluster from sentry_sdk import capture_message -from sentry_sdk.api import start_transaction +from sentry_sdk.api import start_span from sentry_sdk.consts import SPANDATA from sentry_sdk.integrations.redis import RedisIntegration from tests.conftest import ApproxDict @@ -84,7 +84,7 @@ def test_rediscluster_pipeline( events = capture_events() rc = rediscluster.RedisCluster(connection_pool=MOCK_CONNECTION_POOL) - with start_transaction(): + with start_span(name="redis"): pipeline = rc.pipeline() pipeline.get("foo") pipeline.set("bar", 1) @@ -95,12 +95,10 @@ def test_rediscluster_pipeline( (span,) = event["spans"] assert span["op"] == "db.redis" assert span["description"] == "redis.pipeline.execute" + assert span["data"]["redis.commands.count"] == 3 + assert span["data"]["redis.commands.first_ten"] == expected_first_ten assert span["data"] == ApproxDict( { - "redis.commands": { - "count": 3, - "first_ten": expected_first_ten, - }, SPANDATA.DB_SYSTEM: "redis", SPANDATA.DB_NAME: "1", SPANDATA.SERVER_ADDRESS: "localhost", @@ -122,7 +120,7 @@ def test_db_connection_attributes_client(sentry_init, capture_events, redisclust events = capture_events() rc = rediscluster_cls(connection_pool=MOCK_CONNECTION_POOL) - with start_transaction(): + with start_span(name="redis"): rc.get("foobar") (event,) = events @@ -149,7 +147,7 @@ def test_db_connection_attributes_pipeline( events = capture_events() rc = rediscluster.RedisCluster(connection_pool=MOCK_CONNECTION_POOL) - with start_transaction(): + with start_span(name="redis"): pipeline = rc.pipeline() pipeline.get("foo") pipeline.execute() @@ -158,12 +156,11 @@ def test_db_connection_attributes_pipeline( (span,) = event["spans"] assert span["op"] == "db.redis" assert span["description"] == "redis.pipeline.execute" + assert span["data"]["redis.commands.count"] == 1 + assert span["data"]["redis.commands.first_ten"] == ["GET 'foo'"] + assert span["data"] == ApproxDict( { - "redis.commands": { - "count": 1, - "first_ten": ["GET 'foo'"], - }, SPANDATA.DB_SYSTEM: "redis", SPANDATA.DB_NAME: "1", SPANDATA.SERVER_ADDRESS: "localhost", diff --git a/tests/integrations/requests/test_requests.py b/tests/integrations/requests/test_requests.py index 8cfc0f932f..3862763a75 100644 --- a/tests/integrations/requests/test_requests.py +++ b/tests/integrations/requests/test_requests.py @@ -43,8 +43,8 @@ def test_crumb_capture(sentry_init, capture_events): @pytest.mark.parametrize( "status_code,level", [ - (200, None), - (301, None), + (200, "info"), + (301, "info"), (403, "warning"), (405, "warning"), (500, "error"), @@ -66,12 +66,7 @@ def test_crumb_capture_client_error(sentry_init, capture_events, status_code, le (crumb,) = event["breadcrumbs"]["values"] assert crumb["type"] == "http" assert crumb["category"] == "httplib" - - if level is None: - assert "level" not in crumb - else: - assert crumb["level"] == level - + assert crumb["level"] == level assert crumb["data"] == ApproxDict( { "url": url, diff --git a/tests/integrations/rq/test_rq.py b/tests/integrations/rq/test_rq.py 
index e445b588be..a57a3d0dec 100644 --- a/tests/integrations/rq/test_rq.py +++ b/tests/integrations/rq/test_rq.py @@ -5,7 +5,6 @@ from fakeredis import FakeStrictRedis import sentry_sdk -from sentry_sdk import start_transaction from sentry_sdk.integrations.rq import RqIntegration from sentry_sdk.utils import parse_version @@ -119,7 +118,9 @@ def test_transaction_with_error( ) assert envelope["type"] == "transaction" - assert envelope["contexts"]["trace"] == error_event["contexts"]["trace"] + assert envelope["contexts"]["trace"] == DictionaryContaining( + error_event["contexts"]["trace"] + ) assert envelope["transaction"] == error_event["transaction"] assert envelope["extra"]["rq-job"] == DictionaryContaining( { @@ -150,8 +151,7 @@ def test_error_has_trace_context_if_tracing_disabled( def test_tracing_enabled( - sentry_init, - capture_events, + sentry_init, capture_events, DictionaryContaining # noqa: N803 ): sentry_init(integrations=[RqIntegration()], traces_sample_rate=1.0) events = capture_events() @@ -159,16 +159,17 @@ def test_tracing_enabled( queue = rq.Queue(connection=FakeStrictRedis()) worker = rq.SimpleWorker([queue], connection=queue.connection) - with start_transaction(op="rq transaction") as transaction: - queue.enqueue(crashing_job, foo=None) - worker.work(burst=True) + queue.enqueue(crashing_job, foo=None) + worker.work(burst=True) - error_event, envelope, _ = events + error_event, transaction = events assert error_event["transaction"] == "tests.integrations.rq.test_rq.crashing_job" - assert error_event["contexts"]["trace"]["trace_id"] == transaction.trace_id - - assert envelope["contexts"]["trace"] == error_event["contexts"]["trace"] + assert transaction["transaction"] == "tests.integrations.rq.test_rq.crashing_job" + assert ( + DictionaryContaining(error_event["contexts"]["trace"]) + == transaction["contexts"]["trace"] + ) def test_tracing_disabled( @@ -221,34 +222,33 @@ def test_transaction_no_error( ) -def test_traces_sampler_gets_correct_values_in_sampling_context( - sentry_init, DictionaryContaining, ObjectDescribedBy # noqa:N803 -): +def test_traces_sampler_gets_correct_values_in_sampling_context(sentry_init): traces_sampler = mock.Mock(return_value=True) sentry_init(integrations=[RqIntegration()], traces_sampler=traces_sampler) queue = rq.Queue(connection=FakeStrictRedis()) worker = rq.SimpleWorker([queue], connection=queue.connection) - queue.enqueue(do_trick, "Bodhi", trick="roll over") + queue.enqueue( + do_trick, + "Bodhi", + {"age": 5}, + trick="roll over", + times=2, + followup=["fetch", "give paw"], + ) worker.work(burst=True) - traces_sampler.assert_any_call( - DictionaryContaining( - { - "rq_job": ObjectDescribedBy( - type=rq.job.Job, - attrs={ - "description": "tests.integrations.rq.test_rq.do_trick('Bodhi', trick='roll over')", - "result": "Bodhi, can you roll over? 
Good dog!", - "func_name": "tests.integrations.rq.test_rq.do_trick", - "args": ("Bodhi",), - "kwargs": {"trick": "roll over"}, - }, - ), - } - ) - ) + sampling_context = traces_sampler.call_args_list[0][0][0] + assert sampling_context["messaging.system"] == "rq" + assert sampling_context["rq.job.args.0"] == "Bodhi" + assert sampling_context["rq.job.args.1"] == "{'age': 5}" + assert sampling_context["rq.job.kwargs.trick"] == "roll over" + assert sampling_context["rq.job.kwargs.times"] == "2" + assert sampling_context["rq.job.kwargs.followup"] == "['fetch', 'give paw']" + assert sampling_context["rq.job.func"] == "do_trick" + assert sampling_context["messaging.message.id"] + assert sampling_context["messaging.destination.name"] == "default" @pytest.mark.skipif( diff --git a/tests/integrations/rust_tracing/test_rust_tracing.py b/tests/integrations/rust_tracing/test_rust_tracing.py index 893fc86966..9ab64843c4 100644 --- a/tests/integrations/rust_tracing/test_rust_tracing.py +++ b/tests/integrations/rust_tracing/test_rust_tracing.py @@ -11,7 +11,8 @@ RustTracingLevel, EventTypeMapping, ) -from sentry_sdk import start_transaction, capture_message +from sentry_sdk import start_span, capture_message +from tests.conftest import ApproxDict def _test_event_type_mapping(metadata: Dict[str, object]) -> EventTypeMapping: @@ -74,11 +75,11 @@ def test_on_new_span_on_close(sentry_init, capture_events): sentry_init(integrations=[integration], traces_sample_rate=1.0) events = capture_events() - with start_transaction(): + with start_span(): rust_tracing.new_span(RustTracingLevel.Info, 3) sentry_first_rust_span = sentry_sdk.get_current_span() - _, rust_first_rust_span = rust_tracing.spans[3] + rust_first_rust_span = rust_tracing.spans[3] assert sentry_first_rust_span == rust_first_rust_span @@ -102,7 +103,7 @@ def test_on_new_span_on_close(sentry_init, capture_events): data = span["data"] assert data["use_memoized"] assert data["index"] == 10 - assert data["version"] is None + assert "version" not in data def test_nested_on_new_span_on_close(sentry_init, capture_events): @@ -115,23 +116,19 @@ def test_nested_on_new_span_on_close(sentry_init, capture_events): sentry_init(integrations=[integration], traces_sample_rate=1.0) events = capture_events() - with start_transaction(): + with start_span(): original_sentry_span = sentry_sdk.get_current_span() rust_tracing.new_span(RustTracingLevel.Info, 3, index_arg=10) sentry_first_rust_span = sentry_sdk.get_current_span() - _, rust_first_rust_span = rust_tracing.spans[3] # Use a different `index_arg` value for the inner span to help # distinguish the two at the end of the test rust_tracing.new_span(RustTracingLevel.Info, 5, index_arg=9) sentry_second_rust_span = sentry_sdk.get_current_span() - rust_parent_span, rust_second_rust_span = rust_tracing.spans[5] + rust_second_rust_span = rust_tracing.spans[5] assert rust_second_rust_span == sentry_second_rust_span - assert rust_parent_span == sentry_first_rust_span - assert rust_parent_span == rust_first_rust_span - assert rust_parent_span != rust_second_rust_span rust_tracing.close_span(5) @@ -171,12 +168,12 @@ def test_nested_on_new_span_on_close(sentry_init, capture_events): first_span_data = first_span["data"] assert first_span_data["use_memoized"] assert first_span_data["index"] == 10 - assert first_span_data["version"] is None + assert "version" not in first_span_data second_span_data = second_span["data"] assert second_span_data["use_memoized"] assert second_span_data["index"] == 9 - assert second_span_data["version"] 
is None + assert "version" not in second_span_data def test_on_new_span_without_transaction(sentry_init): @@ -192,7 +189,7 @@ def test_on_new_span_without_transaction(sentry_init): rust_tracing.new_span(RustTracingLevel.Info, 3) current_span = sentry_sdk.get_current_span() assert current_span is not None - assert current_span.containing_transaction is None + assert current_span.root_span is None def test_on_event_exception(sentry_init, capture_events): @@ -207,7 +204,7 @@ def test_on_event_exception(sentry_init, capture_events): events = capture_events() sentry_sdk.get_isolation_scope().clear_breadcrumbs() - with start_transaction(): + with start_span(): rust_tracing.new_span(RustTracingLevel.Info, 3) # Mapped to Exception @@ -243,7 +240,7 @@ def test_on_event_breadcrumb(sentry_init, capture_events): events = capture_events() sentry_sdk.get_isolation_scope().clear_breadcrumbs() - with start_transaction(): + with start_span(): rust_tracing.new_span(RustTracingLevel.Info, 3) # Mapped to Breadcrumb @@ -274,7 +271,7 @@ def test_on_event_event(sentry_init, capture_events): events = capture_events() sentry_sdk.get_isolation_scope().clear_breadcrumbs() - with start_transaction(): + with start_span(): rust_tracing.new_span(RustTracingLevel.Info, 3) # Mapped to Event @@ -311,7 +308,7 @@ def test_on_event_ignored(sentry_init, capture_events): events = capture_events() sentry_sdk.get_isolation_scope().clear_breadcrumbs() - with start_transaction(): + with start_span(): rust_tracing.new_span(RustTracingLevel.Info, 3) # Ignored @@ -344,7 +341,7 @@ def span_filter(metadata: Dict[str, object]) -> bool: sentry_init(integrations=[integration], traces_sample_rate=1.0) events = capture_events() - with start_transaction(): + with start_span(): original_sentry_span = sentry_sdk.get_current_span() # Span is not ignored @@ -377,16 +374,16 @@ def test_record(sentry_init): ) sentry_init(integrations=[integration], traces_sample_rate=1.0) - with start_transaction(): + with start_span(): rust_tracing.new_span(RustTracingLevel.Info, 3) span_before_record = sentry_sdk.get_current_span().to_json() - assert span_before_record["data"]["version"] is None + assert "version" not in span_before_record["attributes"] rust_tracing.record(3) span_after_record = sentry_sdk.get_current_span().to_json() - assert span_after_record["data"]["version"] == "memoized" + assert span_after_record["attributes"]["version"] == "memoized" def test_record_in_ignored_span(sentry_init): @@ -403,18 +400,18 @@ def span_filter(metadata: Dict[str, object]) -> bool: ) sentry_init(integrations=[integration], traces_sample_rate=1.0) - with start_transaction(): + with start_span(): rust_tracing.new_span(RustTracingLevel.Info, 3) span_before_record = sentry_sdk.get_current_span().to_json() - assert span_before_record["data"]["version"] is None + assert "version" not in span_before_record["attributes"] rust_tracing.new_span(RustTracingLevel.Trace, 5) rust_tracing.record(5) # `on_record()` should not do anything to the current Sentry span if the associated Rust span was ignored span_after_record = sentry_sdk.get_current_span().to_json() - assert span_after_record["data"]["version"] is None + assert "version" not in span_after_record["attributes"] @pytest.mark.parametrize( @@ -443,33 +440,37 @@ def test_include_tracing_fields( traces_sample_rate=1.0, send_default_pii=send_default_pii, ) - with start_transaction(): + with start_span(): rust_tracing.new_span(RustTracingLevel.Info, 3) span_before_record = sentry_sdk.get_current_span().to_json() if 
tracing_fields_expected: - assert span_before_record["data"]["version"] is None + assert "version" not in span_before_record["attributes"] else: - assert span_before_record["data"]["version"] == "[Filtered]" + assert span_before_record["attributes"]["version"] == "[Filtered]" rust_tracing.record(3) span_after_record = sentry_sdk.get_current_span().to_json() if tracing_fields_expected: - assert span_after_record["data"] == { - "thread.id": mock.ANY, - "thread.name": mock.ANY, - "use_memoized": True, - "version": "memoized", - "index": 10, - } + assert span_after_record["attributes"] == ApproxDict( + { + "thread.id": mock.ANY, + "thread.name": mock.ANY, + "use_memoized": True, + "version": "memoized", + "index": 10, + } + ) else: - assert span_after_record["data"] == { - "thread.id": mock.ANY, - "thread.name": mock.ANY, - "use_memoized": "[Filtered]", - "version": "[Filtered]", - "index": "[Filtered]", - } + assert span_after_record["attributes"] == ApproxDict( + { + "thread.id": mock.ANY, + "thread.name": mock.ANY, + "use_memoized": "[Filtered]", + "version": "[Filtered]", + "index": "[Filtered]", + } + ) diff --git a/tests/integrations/sanic/test_sanic.py b/tests/integrations/sanic/test_sanic.py index 0419127239..05b23cb215 100644 --- a/tests/integrations/sanic/test_sanic.py +++ b/tests/integrations/sanic/test_sanic.py @@ -346,8 +346,9 @@ def __init__( expected_status, expected_transaction_name, expected_source=None, + has_transaction_event=True, ): - # type: (Iterable[Optional[Container[int]]], str, int, Optional[str], Optional[str]) -> None + # type: (Iterable[Optional[Container[int]]], str, int, Optional[str], Optional[str], bool) -> None """ expected_transaction_name of None indicates we expect to not receive a transaction """ @@ -356,6 +357,7 @@ def __init__( self.expected_status = expected_status self.expected_transaction_name = expected_transaction_name self.expected_source = expected_source + self.has_transaction_event = has_transaction_event @pytest.mark.skipif( @@ -386,6 +388,7 @@ def __init__( url="/404", expected_status=404, expected_transaction_name=None, + has_transaction_event=False, ), TransactionTestConfig( # With no ignored HTTP statuses, we should get transactions for 404 errors @@ -401,6 +404,7 @@ def __init__( url="/message", expected_status=200, expected_transaction_name=None, + has_transaction_event=False, ), ], ) @@ -430,9 +434,7 @@ def test_transactions(test_config, sentry_init, app, capture_events): (transaction_event, *_) = [*transaction_events, None] # We should have no transaction event if and only if we expect no transactions - assert (transaction_event is None) == ( - test_config.expected_transaction_name is None - ) + assert bool(transaction_event) == test_config.has_transaction_event # If a transaction was expected, ensure it is correct assert ( diff --git a/tests/integrations/socket/test_socket.py b/tests/integrations/socket/test_socket.py index 389256de33..500e9b5608 100644 --- a/tests/integrations/socket/test_socket.py +++ b/tests/integrations/socket/test_socket.py @@ -1,6 +1,6 @@ import socket -from sentry_sdk import start_transaction +from sentry_sdk import start_span from sentry_sdk.integrations.socket import SocketIntegration from tests.conftest import ApproxDict @@ -9,7 +9,7 @@ def test_getaddrinfo_trace(sentry_init, capture_events): sentry_init(integrations=[SocketIntegration()], traces_sample_rate=1.0) events = capture_events() - with start_transaction(): + with start_span(name="socket"): socket.getaddrinfo("example.com", 443) (event,) = events @@ 
-31,7 +31,7 @@ def test_create_connection_trace(sentry_init, capture_events): sentry_init(integrations=[SocketIntegration()], traces_sample_rate=1.0) events = capture_events() - with start_transaction(): + with start_span(name="socket"): socket.create_connection(("example.com", 443), timeout, None) (event,) = events @@ -42,9 +42,9 @@ def test_create_connection_trace(sentry_init, capture_events): assert connect_span["description"] == "example.com:443" assert connect_span["data"] == ApproxDict( { - "address": ["example.com", 443], + "address.host": "example.com", + "address.port": 443, "timeout": timeout, - "source_address": None, } ) @@ -65,7 +65,7 @@ def test_span_origin(sentry_init, capture_events): ) events = capture_events() - with start_transaction(name="foo"): + with start_span(name="foo"): socket.create_connection(("example.com", 443), 1, None) (event,) = events diff --git a/tests/integrations/sqlalchemy/test_sqlalchemy.py b/tests/integrations/sqlalchemy/test_sqlalchemy.py index 2b95fe02d4..999b17a19f 100644 --- a/tests/integrations/sqlalchemy/test_sqlalchemy.py +++ b/tests/integrations/sqlalchemy/test_sqlalchemy.py @@ -1,8 +1,10 @@ +import contextlib import os from datetime import datetime from unittest import mock import pytest +from freezegun import freeze_time from sqlalchemy import Column, ForeignKey, Integer, String, create_engine from sqlalchemy.exc import IntegrityError from sqlalchemy.ext.declarative import declarative_base @@ -10,7 +12,6 @@ from sqlalchemy import text import sentry_sdk -from sentry_sdk import capture_message, start_transaction from sentry_sdk.consts import DEFAULT_MAX_VALUE_LENGTH, SPANDATA from sentry_sdk.integrations.sqlalchemy import SqlalchemyIntegration from sentry_sdk.serializer import MAX_EVENT_BYTES @@ -53,7 +54,7 @@ class Address(Base): assert session.query(Person).first() == bob - capture_message("hi") + sentry_sdk.capture_message("hi") (event,) = events @@ -110,7 +111,7 @@ class Address(Base): Session = sessionmaker(bind=engine) # noqa: N806 session = Session() - with start_transaction(name="test_transaction", sampled=True): + with sentry_sdk.start_span(name="test_transaction", sampled=True): with session.begin_nested(): session.query(Person).first() @@ -134,7 +135,7 @@ class Address(Base): assert ( render_span_tree(event) == """\ -- op=null: description=null +- op="test_transaction": description=null - op="db": description="SAVEPOINT sa_savepoint_1" - op="db": description="SELECT person.id AS person_id, person.name AS person_name \\nFROM person\\n LIMIT ? OFFSET ?" 
- op="db": description="RELEASE SAVEPOINT sa_savepoint_1" @@ -184,7 +185,7 @@ class Address(Base): Session = sessionmaker(bind=engine) # noqa: N806 session = Session() - with start_transaction(name="test_transaction", sampled=True): + with sentry_sdk.start_span(name="test_transaction", sampled=True): with session.begin_nested(): session.query(Person).first() @@ -216,7 +217,7 @@ def test_long_sql_query_preserved(sentry_init, capture_events): engine = create_engine( "sqlite:///:memory:", connect_args={"check_same_thread": False} ) - with start_transaction(name="test"): + with sentry_sdk.start_span(name="test"): with engine.connect() as con: con.execute(text(" UNION ".join("SELECT {}".format(i) for i in range(100)))) @@ -245,7 +246,7 @@ def processor(event, hint): engine = create_engine( "sqlite:///:memory:", connect_args={"check_same_thread": False} ) - with start_transaction(name="test"): + with sentry_sdk.start_span(name="test"): with engine.connect() as con: for _ in range(1500): con.execute( @@ -294,18 +295,16 @@ def test_engine_name_not_string(sentry_init): def test_query_source_disabled(sentry_init, capture_events): - sentry_options = { - "integrations": [SqlalchemyIntegration()], - "enable_tracing": True, - "enable_db_query_source": False, - "db_query_source_threshold_ms": 0, - } - - sentry_init(**sentry_options) + sentry_init( + integrations=[SqlalchemyIntegration()], + traces_sample_rate=1.0, + enable_db_query_source=False, + db_query_source_threshold_ms=0, + ) events = capture_events() - with start_transaction(name="test_transaction", sampled=True): + with sentry_sdk.start_span(name="test_transaction", sampled=True): Base = declarative_base() # noqa: N806 class Person(Base): @@ -347,7 +346,7 @@ class Person(Base): def test_query_source_enabled(sentry_init, capture_events, enable_db_query_source): sentry_options = { "integrations": [SqlalchemyIntegration()], - "enable_tracing": True, + "traces_sample_rate": 1.0, "db_query_source_threshold_ms": 0, } if enable_db_query_source is not None: @@ -357,7 +356,7 @@ def test_query_source_enabled(sentry_init, capture_events, enable_db_query_sourc events = capture_events() - with start_transaction(name="test_transaction", sampled=True): + with sentry_sdk.start_span(name="test_transaction", sampled=True): Base = declarative_base() # noqa: N806 class Person(Base): @@ -398,13 +397,13 @@ class Person(Base): def test_query_source(sentry_init, capture_events): sentry_init( integrations=[SqlalchemyIntegration()], - enable_tracing=True, + traces_sample_rate=1.0, enable_db_query_source=True, db_query_source_threshold_ms=0, ) events = capture_events() - with start_transaction(name="test_transaction", sampled=True): + with sentry_sdk.start_span(name="test_transaction", sampled=True): Base = declarative_base() # noqa: N806 class Person(Base): @@ -463,7 +462,7 @@ def test_query_source_with_module_in_search_path(sentry_init, capture_events): """ sentry_init( integrations=[SqlalchemyIntegration()], - enable_tracing=True, + traces_sample_rate=1.0, enable_db_query_source=True, db_query_source_threshold_ms=0, ) @@ -474,7 +473,7 @@ def test_query_source_with_module_in_search_path(sentry_init, capture_events): query_first_model_from_session, ) - with start_transaction(name="test_transaction", sampled=True): + with sentry_sdk.start_span(name="test_transaction", sampled=True): Base = declarative_base() # noqa: N806 class Person(Base): @@ -526,13 +525,13 @@ class Person(Base): def test_no_query_source_if_duration_too_short(sentry_init, capture_events): sentry_init( 
integrations=[SqlalchemyIntegration()], - enable_tracing=True, + traces_sample_rate=1.0, enable_db_query_source=True, db_query_source_threshold_ms=100, ) events = capture_events() - with start_transaction(name="test_transaction", sampled=True): + with sentry_sdk.start_span(name="test_transaction", sampled=True): Base = declarative_base() # noqa: N806 class Person(Base): @@ -553,11 +552,13 @@ class Person(Base): class fake_record_sql_queries: # noqa: N801 def __init__(self, *args, **kwargs): - with record_sql_queries(*args, **kwargs) as span: - self.span = span + with freeze_time(datetime(2024, 1, 1, microsecond=0)): + with record_sql_queries(*args, **kwargs) as span: + self.span = span + freezer = freeze_time(datetime(2024, 1, 1, microsecond=99999)) + freezer.start() - self.span.start_timestamp = datetime(2024, 1, 1, microsecond=0) - self.span.timestamp = datetime(2024, 1, 1, microsecond=99999) + freezer.stop() def __enter__(self): return self.span @@ -592,13 +593,13 @@ def __exit__(self, type, value, traceback): def test_query_source_if_duration_over_threshold(sentry_init, capture_events): sentry_init( integrations=[SqlalchemyIntegration()], - enable_tracing=True, + traces_sample_rate=1.0, enable_db_query_source=True, db_query_source_threshold_ms=100, ) events = capture_events() - with start_transaction(name="test_transaction", sampled=True): + with sentry_sdk.start_span(name="test_transaction", sampled=True): Base = declarative_base() # noqa: N806 class Person(Base): @@ -617,19 +618,15 @@ class Person(Base): bob = Person(name="Bob") session.add(bob) - class fake_record_sql_queries: # noqa: N801 - def __init__(self, *args, **kwargs): + @contextlib.contextmanager + def fake_record_sql_queries(*args, **kwargs): # noqa: N801 + with freeze_time(datetime(2024, 1, 1, second=0)): with record_sql_queries(*args, **kwargs) as span: - self.span = span + freezer = freeze_time(datetime(2024, 1, 1, second=1)) + freezer.start() + yield span - self.span.start_timestamp = datetime(2024, 1, 1, microsecond=0) - self.span.timestamp = datetime(2024, 1, 1, microsecond=101000) - - def __enter__(self): - return self.span - - def __exit__(self, type, value, traceback): - pass + freezer.stop() with mock.patch( "sentry_sdk.integrations.sqlalchemy.record_sql_queries", @@ -682,7 +679,7 @@ def test_span_origin(sentry_init, capture_events): engine = create_engine( "sqlite:///:memory:", connect_args={"check_same_thread": False} ) - with start_transaction(name="foo"): + with sentry_sdk.start_span(name="foo"): with engine.connect() as con: con.execute(text("SELECT 0")) diff --git a/tests/integrations/starlette/test_starlette.py b/tests/integrations/starlette/test_starlette.py index bc445bf8f2..bf89729b35 100644 --- a/tests/integrations/starlette/test_starlette.py +++ b/tests/integrations/starlette/test_starlette.py @@ -13,11 +13,13 @@ from sentry_sdk import capture_message, get_baggage, get_traceparent from sentry_sdk.integrations.asgi import SentryAsgiMiddleware +from sentry_sdk.integrations.logging import LoggingIntegration from sentry_sdk.integrations.starlette import ( StarletteIntegration, StarletteRequestExtractor, ) from sentry_sdk.utils import parse_version +from tests.conftest import ApproxDict import starlette from starlette.authentication import ( @@ -666,9 +668,9 @@ def test_middleware_spans(sentry_init, capture_events): "AuthenticationMiddleware", "ExceptionMiddleware", "AuthenticationMiddleware", # 'op': 'middleware.starlette.send' - "ServerErrorMiddleware", # 'op': 'middleware.starlette.send' 
"AuthenticationMiddleware", # 'op': 'middleware.starlette.send' "ServerErrorMiddleware", # 'op': 'middleware.starlette.send' + "ServerErrorMiddleware", # 'op': 'middleware.starlette.send' ] assert len(transaction_event["spans"]) == len(expected_middleware_spans) @@ -743,23 +745,23 @@ def test_middleware_callback_spans(sentry_init, capture_events): }, { "op": "middleware.starlette.send", - "description": "ServerErrorMiddleware.__call__.._send", - "tags": {"starlette.middleware_name": "SampleMiddleware"}, + "description": "SampleMiddleware.__call__..do_stuff", + "tags": {"starlette.middleware_name": "ExceptionMiddleware"}, }, { "op": "middleware.starlette.send", - "description": "SentryAsgiMiddleware._run_app.._sentry_wrapped_send", - "tags": {"starlette.middleware_name": "ServerErrorMiddleware"}, + "description": "ServerErrorMiddleware.__call__.._send", + "tags": {"starlette.middleware_name": "SampleMiddleware"}, }, { "op": "middleware.starlette.send", - "description": "SampleMiddleware.__call__..do_stuff", - "tags": {"starlette.middleware_name": "ExceptionMiddleware"}, + "description": "ServerErrorMiddleware.__call__.._send", + "tags": {"starlette.middleware_name": "SampleMiddleware"}, }, { "op": "middleware.starlette.send", - "description": "ServerErrorMiddleware.__call__.._send", - "tags": {"starlette.middleware_name": "SampleMiddleware"}, + "description": "SentryAsgiMiddleware._run_app.._sentry_wrapped_send", + "tags": {"starlette.middleware_name": "ServerErrorMiddleware"}, }, { "op": "middleware.starlette.send", @@ -772,7 +774,7 @@ def test_middleware_callback_spans(sentry_init, capture_events): for span in transaction_event["spans"]: assert span["op"] == expected[idx]["op"] assert span["description"] == expected[idx]["description"] - assert span["tags"] == expected[idx]["tags"] + assert span["tags"] == ApproxDict(expected[idx]["tags"]) idx += 1 @@ -837,16 +839,16 @@ def test_middleware_partial_receive_send(sentry_init, capture_events): "description": "ServerErrorMiddleware.__call__.._send", "tags": {"starlette.middleware_name": "SamplePartialReceiveSendMiddleware"}, }, - { - "op": "middleware.starlette.send", - "description": "SentryAsgiMiddleware._run_app.._sentry_wrapped_send", - "tags": {"starlette.middleware_name": "ServerErrorMiddleware"}, - }, { "op": "middleware.starlette", "description": "ExceptionMiddleware", "tags": {"starlette.middleware_name": "ExceptionMiddleware"}, }, + { + "op": "middleware.starlette.send", + "description": "SentryAsgiMiddleware._run_app.._sentry_wrapped_send", + "tags": {"starlette.middleware_name": "ServerErrorMiddleware"}, + }, { "op": "middleware.starlette.send", "description": "functools.partial(.my_send at ", @@ -863,7 +865,7 @@ def test_middleware_partial_receive_send(sentry_init, capture_events): for span in transaction_event["spans"]: assert span["op"] == expected[idx]["op"] assert span["description"].startswith(expected[idx]["description"]) - assert span["tags"] == expected[idx]["tags"] + assert span["tags"] == ApproxDict(expected[idx]["tags"]) idx += 1 @@ -909,13 +911,13 @@ def test_active_thread_id(sentry_init, capture_envelopes, teardown_profiling, en sentry_init( traces_sample_rate=1.0, profiles_sample_rate=1.0, + integrations=[StarletteIntegration()], ) app = starlette_app_factory() - asgi_app = SentryAsgiMiddleware(app) envelopes = capture_envelopes() - client = TestClient(asgi_app) + client = TestClient(app) response = client.get(endpoint) assert response.status_code == 200 @@ -942,7 +944,9 @@ def test_active_thread_id(sentry_init, 
capture_envelopes, teardown_profiling, en def test_original_request_not_scrubbed(sentry_init, capture_events): - sentry_init(integrations=[StarletteIntegration()]) + sentry_init( + integrations=[StarletteIntegration(), LoggingIntegration(event_level="ERROR")] + ) events = capture_events() @@ -1181,82 +1185,6 @@ def test_span_origin(sentry_init, capture_events): assert span["origin"] == "auto.http.starlette" -class NonIterableContainer: - """Wraps any container and makes it non-iterable. - - Used to test backwards compatibility with our old way of defining failed_request_status_codes, which allowed - passing in a list of (possibly non-iterable) containers. The Python standard library does not provide any built-in - non-iterable containers, so we have to define our own. - """ - - def __init__(self, inner): - self.inner = inner - - def __contains__(self, item): - return item in self.inner - - -parametrize_test_configurable_status_codes_deprecated = pytest.mark.parametrize( - "failed_request_status_codes,status_code,expected_error", - [ - (None, 500, True), - (None, 400, False), - ([500, 501], 500, True), - ([500, 501], 401, False), - ([range(400, 499)], 401, True), - ([range(400, 499)], 500, False), - ([range(400, 499), range(500, 599)], 300, False), - ([range(400, 499), range(500, 599)], 403, True), - ([range(400, 499), range(500, 599)], 503, True), - ([range(400, 403), 500, 501], 401, True), - ([range(400, 403), 500, 501], 405, False), - ([range(400, 403), 500, 501], 501, True), - ([range(400, 403), 500, 501], 503, False), - ([], 500, False), - ([NonIterableContainer(range(500, 600))], 500, True), - ([NonIterableContainer(range(500, 600))], 404, False), - ], -) -"""Test cases for configurable status codes (deprecated API). -Also used by the FastAPI tests. -""" - - -@parametrize_test_configurable_status_codes_deprecated -def test_configurable_status_codes_deprecated( - sentry_init, - capture_events, - failed_request_status_codes, - status_code, - expected_error, -): - with pytest.warns(DeprecationWarning): - starlette_integration = StarletteIntegration( - failed_request_status_codes=failed_request_status_codes - ) - - sentry_init(integrations=[starlette_integration]) - - events = capture_events() - - async def _error(request): - raise HTTPException(status_code) - - app = starlette.applications.Starlette( - routes=[ - starlette.routing.Route("/error", _error, methods=["GET"]), - ], - ) - - client = TestClient(app) - client.get("/error") - - if expected_error: - assert len(events) == 1 - else: - assert not events - - @pytest.mark.skipif( STARLETTE_VERSION < (0, 21), reason="Requires Starlette >= 0.21, because earlier versions do not support HTTP 'HEAD' requests", @@ -1267,9 +1195,8 @@ def test_transaction_http_method_default(sentry_init, capture_events): """ sentry_init( traces_sample_rate=1.0, - integrations=[ - StarletteIntegration(), - ], + auto_enabling_integrations=False, # Make sure that httpx integration is not added, because it adds tracing information to the starlette test clients request. + integrations=[StarletteIntegration()], ) events = capture_events() @@ -1294,6 +1221,7 @@ def test_transaction_http_method_default(sentry_init, capture_events): def test_transaction_http_method_custom(sentry_init, capture_events): sentry_init( traces_sample_rate=1.0, + auto_enabling_integrations=False, # Make sure that httpx integration is not added, because it adds tracing information to the starlette test clients request. 
integrations=[ StarletteIntegration( http_methods_to_capture=( diff --git a/tests/integrations/starlite/test_starlite.py b/tests/integrations/starlite/test_starlite.py index 2c3aa704f5..79fa755608 100644 --- a/tests/integrations/starlite/test_starlite.py +++ b/tests/integrations/starlite/test_starlite.py @@ -5,6 +5,7 @@ from sentry_sdk import capture_message from sentry_sdk.integrations.starlite import StarliteIntegration +from tests.conftest import ApproxDict from typing import Any, Dict @@ -199,7 +200,7 @@ def is_matching_span(expected_span, actual_span): return ( expected_span["op"] == actual_span["op"] and expected_span["description"] == actual_span["description"] - and expected_span["tags"] == actual_span["tags"] + and ApproxDict(expected_span["tags"]) == actual_span["tags"] ) actual_starlite_spans = list( @@ -295,7 +296,7 @@ def is_matching_span(expected_span, actual_span): return ( expected_span["op"] == actual_span["op"] and actual_span["description"].startswith(expected_span["description"]) - and expected_span["tags"] == actual_span["tags"] + and ApproxDict(expected_span["tags"]) == actual_span["tags"] ) actual_starlite_spans = list( diff --git a/tests/integrations/stdlib/test_httplib.py b/tests/integrations/stdlib/test_httplib.py index 908a22dc6c..5f6d57998b 100644 --- a/tests/integrations/stdlib/test_httplib.py +++ b/tests/integrations/stdlib/test_httplib.py @@ -6,16 +6,40 @@ import pytest -from sentry_sdk import capture_message, start_transaction +from sentry_sdk import capture_message, start_span, continue_trace, isolation_scope from sentry_sdk.consts import MATCH_ALL, SPANDATA -from sentry_sdk.tracing import Transaction from sentry_sdk.integrations.stdlib import StdlibIntegration -from tests.conftest import ApproxDict, create_mock_http_server +from tests.conftest import ApproxDict, SortedBaggage, create_mock_http_server PORT = create_mock_http_server() +@pytest.fixture +def capture_request_headers(monkeypatch): + """ + HTTPConnection.send is passed a string containing (among other things) + the headers on the request. Mock it so we can check the headers. 
+ """ + + def inner(do_send=True): + request_headers = {} + old_send = HTTPConnection.send + + def patched_send(self, data): + for line in data.decode("utf-8").split("\r\n")[1:]: + if line: + key, val = line.split(": ") + request_headers[key] = val + if do_send: + old_send(self, data) + + monkeypatch.setattr(HTTPConnection, "send", patched_send) + return request_headers + + return inner + + def test_crumb_capture(sentry_init, capture_events): sentry_init(integrations=[StdlibIntegration()]) events = capture_events() @@ -45,8 +69,8 @@ def test_crumb_capture(sentry_init, capture_events): @pytest.mark.parametrize( "status_code,level", [ - (200, None), - (301, None), + (200, "info"), + (301, "info"), (403, "warning"), (405, "warning"), (500, "error"), @@ -69,12 +93,7 @@ def test_crumb_capture_client_error(sentry_init, capture_events, status_code, le assert crumb["type"] == "http" assert crumb["category"] == "httplib" - - if level is None: - assert "level" not in crumb - else: - assert crumb["level"] == level - + assert crumb["level"] == level assert crumb["data"] == ApproxDict( { "url": url, @@ -123,7 +142,7 @@ def test_empty_realurl(sentry_init): """ sentry_init(dsn="") - HTTPConnection("example.com", port=443).putrequest("POST", None) + HTTPConnection("localhost", PORT).putrequest("POST", None) def test_httplib_misuse(sentry_init, capture_events, request): @@ -175,98 +194,89 @@ def test_httplib_misuse(sentry_init, capture_events, request): ) -def test_outgoing_trace_headers(sentry_init, monkeypatch): - # HTTPSConnection.send is passed a string containing (among other things) - # the headers on the request. Mock it so we can check the headers, and also - # so it doesn't try to actually talk to the internet. - mock_send = mock.Mock() - monkeypatch.setattr(HTTPSConnection, "send", mock_send) - +def test_outgoing_trace_headers( + sentry_init, capture_envelopes, capture_request_headers +): sentry_init(traces_sample_rate=1.0) + envelopes = capture_envelopes() + request_headers = capture_request_headers() headers = { + "sentry-trace": "771a43a4192642f0b136d5159a501700-1234567890abcdef-1", "baggage": ( "other-vendor-value-1=foo;bar;baz, sentry-trace_id=771a43a4192642f0b136d5159a501700, " "sentry-public_key=49d0f7386ad645858ae85020e393bef3, sentry-sample_rate=0.01337, " - "sentry-user_id=Am%C3%A9lie, sentry-sample_rand=0.132521102938283, other-vendor-value-2=foo;bar;" + "sentry-user_id=Am%C3%A9lie, sentry-sample_rand=0.003370, other-vendor-value-2=foo;bar;" ), } - transaction = Transaction.continue_from_headers(headers) + with isolation_scope(): + with continue_trace(headers): + with start_span(name="/interactions/other-dogs/new-dog"): + conn = HTTPConnection("localhost", PORT) + conn.request("GET", "/top-chasers") + conn.getresponse() + + (envelope,) = envelopes + transaction = envelope.get_transaction_event() + request_span = transaction["spans"][-1] + + expected_sentry_trace = "{trace_id}-{parent_span_id}-{sampled}".format( + trace_id=transaction["contexts"]["trace"]["trace_id"], + parent_span_id=request_span["span_id"], + sampled=1, + ) + assert request_headers["sentry-trace"] == expected_sentry_trace + + expected_outgoing_baggage = ( + "sentry-trace_id=771a43a4192642f0b136d5159a501700," + "sentry-public_key=49d0f7386ad645858ae85020e393bef3," + "sentry-sample_rate=0.01337," + "sentry-user_id=Am%C3%A9lie," + "sentry-sample_rand=0.003370," + "sentry-sampled=true" + ) - with start_transaction( - transaction=transaction, - name="/interactions/other-dogs/new-dog", - op="greeting.sniff", - 
trace_id="12312012123120121231201212312012", - ) as transaction: - HTTPSConnection("www.squirrelchasers.com").request("GET", "/top-chasers") + assert request_headers["baggage"] == SortedBaggage(expected_outgoing_baggage) - (request_str,) = mock_send.call_args[0] - request_headers = {} - for line in request_str.decode("utf-8").split("\r\n")[1:]: - if line: - key, val = line.split(": ") - request_headers[key] = val - - request_span = transaction._span_recorder.spans[-1] - expected_sentry_trace = "{trace_id}-{parent_span_id}-{sampled}".format( - trace_id=transaction.trace_id, - parent_span_id=request_span.span_id, - sampled=1, - ) - assert request_headers["sentry-trace"] == expected_sentry_trace - - expected_outgoing_baggage = ( - "sentry-trace_id=771a43a4192642f0b136d5159a501700," - "sentry-public_key=49d0f7386ad645858ae85020e393bef3," - "sentry-sample_rate=1.0," - "sentry-user_id=Am%C3%A9lie," - "sentry-sample_rand=0.132521102938283" - ) - - assert request_headers["baggage"] == expected_outgoing_baggage - - -def test_outgoing_trace_headers_head_sdk(sentry_init, monkeypatch): - # HTTPSConnection.send is passed a string containing (among other things) - # the headers on the request. Mock it so we can check the headers, and also - # so it doesn't try to actually talk to the internet. - mock_send = mock.Mock() - monkeypatch.setattr(HTTPSConnection, "send", mock_send) +def test_outgoing_trace_headers_head_sdk( + sentry_init, capture_request_headers, capture_envelopes +): sentry_init(traces_sample_rate=0.5, release="foo") - with mock.patch("sentry_sdk.tracing_utils.Random.uniform", return_value=0.25): - transaction = Transaction.continue_from_headers({}) + envelopes = capture_envelopes() + request_headers = capture_request_headers() - with start_transaction(transaction=transaction, name="Head SDK tx") as transaction: - HTTPSConnection("www.squirrelchasers.com").request("GET", "/top-chasers") + with mock.patch("sentry_sdk.tracing_utils.Random.uniform", return_value=0.25): + with isolation_scope(): + with continue_trace({}): + with start_span(name="Head SDK tx") as root_span: + conn = HTTPConnection("localhost", PORT) + conn.request("GET", "/top-chasers") + conn.getresponse() + + (envelope,) = envelopes + transaction = envelope.get_transaction_event() + request_span = transaction["spans"][-1] + + expected_sentry_trace = "{trace_id}-{parent_span_id}-{sampled}".format( + trace_id=transaction["contexts"]["trace"]["trace_id"], + parent_span_id=request_span["span_id"], + sampled=1, + ) + assert request_headers["sentry-trace"] == expected_sentry_trace + + expected_outgoing_baggage = ( + f"sentry-trace_id={root_span.trace_id}," # noqa: E231 + "sentry-sample_rand=0.250000," + "sentry-environment=production," + "sentry-release=foo," + "sentry-sample_rate=0.5," + "sentry-sampled=true," + "sentry-transaction=Head%20SDK%20tx" + ) - (request_str,) = mock_send.call_args[0] - request_headers = {} - for line in request_str.decode("utf-8").split("\r\n")[1:]: - if line: - key, val = line.split(": ") - request_headers[key] = val - - request_span = transaction._span_recorder.spans[-1] - expected_sentry_trace = "{trace_id}-{parent_span_id}-{sampled}".format( - trace_id=transaction.trace_id, - parent_span_id=request_span.span_id, - sampled=1, - ) - assert request_headers["sentry-trace"] == expected_sentry_trace - - expected_outgoing_baggage = ( - "sentry-trace_id=%s," - "sentry-sample_rand=0.250000," - "sentry-environment=production," - "sentry-release=foo," - "sentry-sample_rate=0.5," - "sentry-sampled=%s" - ) % 
(transaction.trace_id, "true" if transaction.sampled else "false") - - assert request_headers["baggage"] == expected_outgoing_baggage + assert request_headers["baggage"] == SortedBaggage(expected_outgoing_baggage) @pytest.mark.parametrize( @@ -329,42 +339,23 @@ def test_outgoing_trace_headers_head_sdk(sentry_init, monkeypatch): ], ) def test_option_trace_propagation_targets( - sentry_init, monkeypatch, trace_propagation_targets, host, path, trace_propagated + sentry_init, + capture_request_headers, + trace_propagation_targets, + host, + path, + trace_propagated, ): - # HTTPSConnection.send is passed a string containing (among other things) - # the headers on the request. Mock it so we can check the headers, and also - # so it doesn't try to actually talk to the internet. - mock_send = mock.Mock() - monkeypatch.setattr(HTTPSConnection, "send", mock_send) - sentry_init( trace_propagation_targets=trace_propagation_targets, traces_sample_rate=1.0, ) - headers = { - "baggage": ( - "sentry-trace_id=771a43a4192642f0b136d5159a501700, " - "sentry-public_key=49d0f7386ad645858ae85020e393bef3, sentry-sample_rate=0.01337, " - ) - } - - transaction = Transaction.continue_from_headers(headers) + request_headers = capture_request_headers(do_send=False) - with start_transaction( - transaction=transaction, - name="/interactions/other-dogs/new-dog", - op="greeting.sniff", - trace_id="12312012123120121231201212312012", - ) as transaction: + with start_span(name="foo"): HTTPSConnection(host).request("GET", path) - - (request_str,) = mock_send.call_args[0] - request_headers = {} - for line in request_str.decode("utf-8").split("\r\n")[1:]: - if line: - key, val = line.split(": ") - request_headers[key] = val + # don't invoke getresponse to avoid actual network traffic if trace_propagated: assert "sentry-trace" in request_headers @@ -378,8 +369,8 @@ def test_span_origin(sentry_init, capture_events): sentry_init(traces_sample_rate=1.0, debug=True) events = capture_events() - with start_transaction(name="foo"): - conn = HTTPConnection("example.com") + with start_span(name="foo"): + conn = HTTPConnection("localhost", PORT) conn.request("GET", "/foo") conn.getresponse() @@ -399,7 +390,7 @@ def test_http_timeout(monkeypatch, sentry_init, capture_envelopes): envelopes = capture_envelopes() with pytest.raises(TimeoutError): - with start_transaction(op="op", name="name"): + with start_span(op="op", name="name"): conn = HTTPSConnection("www.example.com") conn.request("GET", "/bla") conn.getresponse() diff --git a/tests/integrations/stdlib/test_subprocess.py b/tests/integrations/stdlib/test_subprocess.py index 593ef8a0dc..8e3166e512 100644 --- a/tests/integrations/stdlib/test_subprocess.py +++ b/tests/integrations/stdlib/test_subprocess.py @@ -3,10 +3,11 @@ import subprocess import sys from collections.abc import Mapping +from unittest import mock import pytest -from sentry_sdk import capture_message, start_transaction +from sentry_sdk import capture_exception, capture_message, start_span from sentry_sdk.integrations.stdlib import StdlibIntegration from tests.conftest import ApproxDict @@ -58,7 +59,7 @@ def test_subprocess_basic( sentry_init(integrations=[StdlibIntegration()], traces_sample_rate=1.0) events = capture_events() - with start_transaction(name="foo") as transaction: + with start_span(name="foo") as span: args = [ sys.executable, "-c", @@ -109,7 +110,7 @@ def test_subprocess_basic( assert os.environ == old_environ - assert transaction.trace_id in str(output) + assert span.trace_id in str(output) 
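# (Editor's note, not part of the patch: a minimal, self-contained sketch of
# the header-capture trick used by the `capture_request_headers` fixture
# introduced above. A raw HTTP request begins with the request line, followed
# by "Key: value" header lines and a blank line before the body, so splitting
# the bytes passed to HTTPConnection.send on CRLF recovers the outgoing
# `sentry-trace` and `baggage` headers. `parse_request_headers` is an
# illustrative name and not part of the test suite.)

def parse_request_headers(raw_request):
    # type: (bytes) -> dict
    headers = {}
    for line in raw_request.decode("utf-8").split("\r\n")[1:]:
        if not line:
            break  # a blank line separates the headers from the body
        key, _, value = line.partition(": ")
        headers[key] = value
    return headers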
capture_message("hi") @@ -177,7 +178,7 @@ def test_subprocess_basic( def test_subprocess_empty_env(sentry_init, monkeypatch): monkeypatch.setenv("TEST_MARKER", "should_not_be_seen") sentry_init(integrations=[StdlibIntegration()], traces_sample_rate=1.0) - with start_transaction(name="foo"): + with start_span(name="foo"): args = [ sys.executable, "-c", @@ -200,7 +201,7 @@ def test_subprocess_span_origin(sentry_init, capture_events): sentry_init(integrations=[StdlibIntegration()], traces_sample_rate=1.0) events = capture_events() - with start_transaction(name="foo"): + with start_span(name="foo"): args = [ sys.executable, "-c", @@ -224,3 +225,37 @@ def test_subprocess_span_origin(sentry_init, capture_events): assert event["spans"][2]["op"] == "subprocess.wait" assert event["spans"][2]["origin"] == "auto.subprocess.stdlib.subprocess" + + +def test_subprocess_breadcrumb(sentry_init, capture_events): + sentry_init() + events = capture_events() + + args = [ + sys.executable, + "-c", + "print('hello world')", + ] + popen = subprocess.Popen(args) + popen.communicate() + popen.poll() + + try: + 1 / 0 + except ZeroDivisionError as ex: + capture_exception(ex) + + (event,) = events + breadcrumbs = event["breadcrumbs"]["values"] + assert len(breadcrumbs) == 1 + + (crumb,) = breadcrumbs + assert crumb["type"] == "subprocess" + assert crumb["category"] == "subprocess" + assert crumb["message"] == " ".join(args) + assert crumb["timestamp"] == mock.ANY + assert crumb["data"] == { + "subprocess.pid": popen.pid, + "thread.id": mock.ANY, + "thread.name": mock.ANY, + } diff --git a/tests/integrations/strawberry/test_strawberry.py b/tests/integrations/strawberry/test_strawberry.py index 7b40b238d2..d1774aeca5 100644 --- a/tests/integrations/strawberry/test_strawberry.py +++ b/tests/integrations/strawberry/test_strawberry.py @@ -204,7 +204,9 @@ def test_capture_request_if_available_and_send_pii_is_on( (error_event,) = events - assert error_event["exception"]["values"][0]["mechanism"]["type"] == "strawberry" + assert len(error_event["exception"]["values"]) == 2 + assert error_event["exception"]["values"][0]["mechanism"]["type"] == "chained" + assert error_event["exception"]["values"][-1]["mechanism"]["type"] == "strawberry" assert error_event["request"]["api_target"] == "graphql" assert error_event["request"]["data"] == { "query": query, @@ -258,7 +260,10 @@ def test_do_not_capture_request_if_send_pii_is_off( assert len(events) == 1 (error_event,) = events - assert error_event["exception"]["values"][0]["mechanism"]["type"] == "strawberry" + + assert len(error_event["exception"]["values"]) == 2 + assert error_event["exception"]["values"][0]["mechanism"]["type"] == "chained" + assert error_event["exception"]["values"][-1]["mechanism"]["type"] == "strawberry" assert "data" not in error_event["request"] assert "response" not in error_event["contexts"] @@ -505,7 +510,7 @@ def test_transaction_no_operation_name( query_span = query_spans[0] assert query_span["description"] == "query" assert query_span["data"]["graphql.operation.type"] == "query" - assert query_span["data"]["graphql.operation.name"] is None + assert "graphql.operation.name" not in query_span["data"] assert query_span["data"]["graphql.document"] == query assert query_span["data"]["graphql.resource_name"] @@ -582,7 +587,7 @@ def test_transaction_mutation( query_span = query_spans[0] assert query_span["description"] == "mutation" assert query_span["data"]["graphql.operation.type"] == "mutation" - assert query_span["data"]["graphql.operation.name"] is None + 
assert query_span["data"]["graphql.operation.name"] == "Change" assert query_span["data"]["graphql.document"] == query assert query_span["data"]["graphql.resource_name"] diff --git a/tests/integrations/threading/test_threading.py b/tests/integrations/threading/test_threading.py index 4395891d62..8a5dfef62b 100644 --- a/tests/integrations/threading/test_threading.py +++ b/tests/integrations/threading/test_threading.py @@ -2,6 +2,7 @@ from concurrent import futures from textwrap import dedent from threading import Thread +import sys import pytest @@ -36,11 +37,11 @@ def crash(): assert not events -@pytest.mark.parametrize("propagate_hub", (True, False)) -def test_propagates_hub(sentry_init, capture_events, propagate_hub): +@pytest.mark.parametrize("propagate_scope", (True, False)) +def test_propagates_scope(sentry_init, capture_events, propagate_scope): sentry_init( default_integrations=False, - integrations=[ThreadingIntegration(propagate_hub=propagate_hub)], + integrations=[ThreadingIntegration(propagate_scope=propagate_scope)], ) events = capture_events() @@ -66,25 +67,25 @@ def stage2(): assert exception["mechanism"]["type"] == "threading" assert not exception["mechanism"]["handled"] - if propagate_hub: + if propagate_scope: assert event["tags"]["stage1"] == "true" else: assert "stage1" not in event.get("tags", {}) -@pytest.mark.parametrize("propagate_hub", (True, False)) -def test_propagates_threadpool_hub(sentry_init, capture_events, propagate_hub): +@pytest.mark.parametrize("propagate_scope", (True, False)) +def test_propagates_threadpool_scope(sentry_init, capture_events, propagate_scope): sentry_init( traces_sample_rate=1.0, - integrations=[ThreadingIntegration(propagate_hub=propagate_hub)], + integrations=[ThreadingIntegration(propagate_scope=propagate_scope)], ) events = capture_events() def double(number): - with sentry_sdk.start_span(op="task", name=str(number)): + with sentry_sdk.start_span(op="task", name=str(number), only_if_parent=True): return number * 2 - with sentry_sdk.start_transaction(name="test_handles_threadpool"): + with sentry_sdk.start_span(name="test_handles_threadpool"): with futures.ThreadPoolExecutor(max_workers=1) as executor: tasks = [executor.submit(double, number) for number in [1, 2, 3, 4]] for future in futures.as_completed(tasks): @@ -92,7 +93,7 @@ def double(number): sentry_sdk.flush() - if propagate_hub: + if propagate_scope: assert len(events) == 1 (event,) = events assert event["spans"][0]["trace_id"] == event["spans"][1]["trace_id"] @@ -104,14 +105,15 @@ def double(number): assert len(event["spans"]) == 0 -@pytest.mark.skip(reason="Temporarily disable to release SDK 2.0a1.") +@pytest.mark.skipif(sys.version[:3] == "3.8", reason="Fails in CI on 3.8") def test_circular_references(sentry_init, request): sentry_init(default_integrations=False, integrations=[ThreadingIntegration()]) - gc.collect() gc.disable() request.addfinalizer(gc.enable) + gc.collect() + class MyThread(Thread): def run(self): pass @@ -232,8 +234,8 @@ def do_some_work(number): threads = [] - with sentry_sdk.start_transaction(op="outer-trx"): - for number in range(5): + with sentry_sdk.start_span(op="outer-trx"): + for number in range(2): with sentry_sdk.start_span( op=f"outer-submit-{number}", name="Thread: main" ): @@ -244,32 +246,44 @@ def do_some_work(number): for t in threads: t.join() - (event,) = events if propagate_scope: + # The children spans from the threads become parts of the existing span + # tree since we propagated the scope + assert len(events) == 1 + (event,) = 
events + assert render_span_tree(event) == dedent( """\ - op="outer-trx": description=null - op="outer-submit-0": description="Thread: main" - op="inner-run-0": description="Thread: child-0" - op="outer-submit-1": description="Thread: main" - - op="inner-run-1": description="Thread: child-1" - - op="outer-submit-2": description="Thread: main" - - op="inner-run-2": description="Thread: child-2" - - op="outer-submit-3": description="Thread: main" - - op="inner-run-3": description="Thread: child-3" - - op="outer-submit-4": description="Thread: main" - - op="inner-run-4": description="Thread: child-4"\ + - op="inner-run-1": description="Thread: child-1"\ """ ) elif not propagate_scope: - assert render_span_tree(event) == dedent( + # The spans from the threads become their own root spans/transactions + # as the connection to the parent span was severed when the scope was + # cleared + assert len(events) == 3 + (event1, event2, event3) = sorted(events, key=render_span_tree) + + assert render_span_tree(event1) == dedent( + """\ + - op="inner-run-0": description=null\ +""" + ) + assert render_span_tree(event2) == dedent( + """\ + - op="inner-run-1": description=null\ +""" + ) + + assert render_span_tree(event3) == dedent( """\ - op="outer-trx": description=null - op="outer-submit-0": description="Thread: main" - - op="outer-submit-1": description="Thread: main" - - op="outer-submit-2": description="Thread: main" - - op="outer-submit-3": description="Thread: main" - - op="outer-submit-4": description="Thread: main"\ + - op="outer-submit-1": description="Thread: main"\ """ ) diff --git a/tests/integrations/tornado/test_tornado.py b/tests/integrations/tornado/test_tornado.py index 294f605f6a..e5dae3fcd9 100644 --- a/tests/integrations/tornado/test_tornado.py +++ b/tests/integrations/tornado/test_tornado.py @@ -1,9 +1,10 @@ import json +import re import pytest import sentry_sdk -from sentry_sdk import start_transaction, capture_message +from sentry_sdk import start_span, capture_message from sentry_sdk.integrations.tornado import TornadoIntegration from tornado.web import RequestHandler, Application, HTTPError @@ -116,7 +117,7 @@ def test_transactions(tornado_testcase, sentry_init, capture_events, handler, co events = capture_events() client = tornado_testcase(Application([(r"/hi", handler)])) - with start_transaction(name="client") as span: + with start_span(name="client") as span: pass response = client.fetch( @@ -134,7 +135,7 @@ def test_transactions(tornado_testcase, sentry_init, capture_events, handler, co assert client_tx["transaction"] == "client" assert client_tx["transaction_info"] == { "source": "custom" - } # because this is just the start_transaction() above. + } # because this is just the start_span() above. 
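# (Editor's note, not part of the patch: a hedged, minimal illustration of the
# `propagate_scope` behaviour asserted by the threading tests above. With
# propagate_scope=True, a span started inside a worker thread joins the span
# tree of the calling thread; with propagate_scope=False, the connection to
# the parent scope is severed and the thread's span becomes its own root
# span. The DSN-less init is for illustration only.)

import threading

import sentry_sdk
from sentry_sdk.integrations.threading import ThreadingIntegration

sentry_sdk.init(
    traces_sample_rate=1.0,
    integrations=[ThreadingIntegration(propagate_scope=True)],
)

def work():
    # With propagate_scope=True this span is a child of "parent" below;
    # with propagate_scope=False it would start a new trace of its own.
    with sentry_sdk.start_span(op="task", name="child work"):
        pass

with sentry_sdk.start_span(op="outer", name="parent"):
    worker = threading.Thread(target=work)
    worker.start()
    worker.join()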
if server_error is not None: assert server_error["exception"]["values"][0]["type"] == "ZeroDivisionError" @@ -450,3 +451,30 @@ def test_span_origin(tornado_testcase, sentry_init, capture_events): (_, event) = events assert event["contexts"]["trace"]["origin"] == "auto.http.tornado" + + +def test_attributes_in_traces_sampler(tornado_testcase, sentry_init): + def traces_sampler(sampling_context): + assert sampling_context["url.query"] == "foo=bar" + assert sampling_context["url.path"] == "/hi" + assert sampling_context["url.scheme"] == "http" + assert re.match( + r"http:\/\/127\.0\.0\.1:[0-9]{4,5}\/hi\?foo=bar", + sampling_context["url.full"], + ) + assert sampling_context["http.request.method"] == "GET" + assert sampling_context["server.address"] == "127.0.0.1" + assert sampling_context["server.port"].isnumeric() + assert sampling_context["network.protocol.name"] == "HTTP" + assert sampling_context["network.protocol.version"] == "1.1" + assert sampling_context["http.request.header.custom-header"] == "Custom Value" + + return True + + sentry_init( + integrations=[TornadoIntegration], + traces_sampler=traces_sampler, + ) + + client = tornado_testcase(Application([(r"/hi", HelloHandler)])) + client.fetch("/hi?foo=bar", headers={"Custom-Header": "Custom Value"}) diff --git a/tests/integrations/wsgi/test_wsgi.py b/tests/integrations/wsgi/test_wsgi.py index 656fc1757f..76c80f6c6a 100644 --- a/tests/integrations/wsgi/test_wsgi.py +++ b/tests/integrations/wsgi/test_wsgi.py @@ -40,7 +40,7 @@ def next(self): def test_basic(sentry_init, crashing_app, capture_events): - sentry_init(send_default_pii=True) + sentry_init(send_default_pii=True, debug=True) app = SentryWsgiMiddleware(crashing_app) client = Client(app) events = capture_events() @@ -141,7 +141,7 @@ def test_transaction_with_error( def dogpark(environ, start_response): raise ValueError("Fetch aborted. The ball was not returned.") - sentry_init(send_default_pii=True, traces_sample_rate=1.0) + sentry_init(send_default_pii=True, traces_sample_rate=1.0, debug=True) app = SentryWsgiMiddleware(dogpark) client = Client(app) events = capture_events() @@ -326,33 +326,27 @@ def dogpark(environ, start_response): assert error_event["contexts"]["trace"]["trace_id"] == trace_id -def test_traces_sampler_gets_correct_values_in_sampling_context( - sentry_init, - DictionaryContaining, # noqa:N803 -): +def test_traces_sampler_gets_correct_values_in_sampling_context(sentry_init): def app(environ, start_response): start_response("200 OK", []) return ["Go get the ball! 
Good dog!"] - traces_sampler = mock.Mock(return_value=True) + def traces_sampler(sampling_context): + assert sampling_context["http.request.method"] == "GET" + assert sampling_context["url.path"] == "/dogs/are/great/" + assert sampling_context["url.query"] == "cats=too" + assert sampling_context["url.scheme"] == "http" + assert ( + sampling_context["url.full"] == "http://localhost/dogs/are/great/?cats=too" + ) + assert sampling_context["http.request.header.custom-header"] == "Custom Value" + return True + sentry_init(send_default_pii=True, traces_sampler=traces_sampler) app = SentryWsgiMiddleware(app) client = Client(app) - client.get("/dogs/are/great/") - - traces_sampler.assert_any_call( - DictionaryContaining( - { - "wsgi_environ": DictionaryContaining( - { - "PATH_INFO": "/dogs/are/great/", - "REQUEST_METHOD": "GET", - }, - ), - } - ) - ) + client.get("/dogs/are/great/?cats=too", headers={"Custom-Header": "Custom Value"}) def test_session_mode_defaults_to_request_mode_in_wsgi_handler( @@ -443,7 +437,7 @@ def test_app(environ, start_response): sentry_init( traces_sample_rate=1.0, - _experiments={"profiles_sample_rate": 1.0}, + profiles_sample_rate=1.0, ) app = SentryWsgiMiddleware(test_app) envelopes = capture_envelopes() diff --git a/tests/new_scopes_compat/__init__.py b/tests/new_scopes_compat/__init__.py deleted file mode 100644 index 45391bd9ad..0000000000 --- a/tests/new_scopes_compat/__init__.py +++ /dev/null @@ -1,7 +0,0 @@ -""" -Separate module for tests that check backwards compatibility of the Hub API with 1.x. -These tests should be removed once we remove the Hub API, likely in the next major. - -All tests in this module are run with hub isolation, provided by `isolate_hub` autouse -fixture, defined in `conftest.py`. -""" diff --git a/tests/new_scopes_compat/conftest.py b/tests/new_scopes_compat/conftest.py deleted file mode 100644 index 9f16898dea..0000000000 --- a/tests/new_scopes_compat/conftest.py +++ /dev/null @@ -1,8 +0,0 @@ -import pytest -import sentry_sdk - - -@pytest.fixture(autouse=True) -def isolate_hub(suppress_deprecation_warnings): - with sentry_sdk.Hub(None): - yield diff --git a/tests/new_scopes_compat/test_new_scopes_compat.py b/tests/new_scopes_compat/test_new_scopes_compat.py deleted file mode 100644 index 21e2ac27d3..0000000000 --- a/tests/new_scopes_compat/test_new_scopes_compat.py +++ /dev/null @@ -1,275 +0,0 @@ -import sentry_sdk -from sentry_sdk.hub import Hub - -""" -Those tests are meant to check the compatibility of the new scopes in SDK 2.0 with the old Hub/Scope system in SDK 1.x. - -Those tests have been run with the latest SDK 1.x versiona and the data used in the `assert` statements represents -the behvaior of the SDK 1.x. - -This makes sure that we are backwards compatible. (on a best effort basis, there will probably be some edge cases that are not covered here) -""" - - -def test_configure_scope_sdk1(sentry_init, capture_events): - """ - Mutate data in a `with configure_scope` block. - - Checks the results of SDK 2.x against the results the same code returned in SDK 1.x. 
- """ - sentry_init() - - events = capture_events() - - sentry_sdk.set_tag("A", 1) - sentry_sdk.capture_message("Event A") - - with sentry_sdk.configure_scope() as scope: # configure scope - sentry_sdk.set_tag("B1", 1) - scope.set_tag("B2", 1) - sentry_sdk.capture_message("Event B") - - sentry_sdk.set_tag("Z", 1) - sentry_sdk.capture_message("Event Z") - - (event_a, event_b, event_z) = events - - # Check against the results the same code returned in SDK 1.x - assert event_a["tags"] == {"A": 1} - assert event_b["tags"] == {"A": 1, "B1": 1, "B2": 1} - assert event_z["tags"] == {"A": 1, "B1": 1, "B2": 1, "Z": 1} - - -def test_push_scope_sdk1(sentry_init, capture_events): - """ - Mutate data in a `with push_scope` block - - Checks the results of SDK 2.x against the results the same code returned in SDK 1.x. - """ - sentry_init() - - events = capture_events() - - sentry_sdk.set_tag("A", 1) - sentry_sdk.capture_message("Event A") - - with sentry_sdk.push_scope() as scope: # push scope - sentry_sdk.set_tag("B1", 1) - scope.set_tag("B2", 1) - sentry_sdk.capture_message("Event B") - - sentry_sdk.set_tag("Z", 1) - sentry_sdk.capture_message("Event Z") - - (event_a, event_b, event_z) = events - - # Check against the results the same code returned in SDK 1.x - assert event_a["tags"] == {"A": 1} - assert event_b["tags"] == {"A": 1, "B1": 1, "B2": 1} - assert event_z["tags"] == {"A": 1, "Z": 1} - - -def test_with_hub_sdk1(sentry_init, capture_events): - """ - Mutate data in a `with Hub:` block - - Checks the results of SDK 2.x against the results the same code returned in SDK 1.x. - """ - sentry_init() - - events = capture_events() - - sentry_sdk.set_tag("A", 1) - sentry_sdk.capture_message("Event A") - - with Hub.current as hub: # with hub - sentry_sdk.set_tag("B1", 1) - hub.scope.set_tag("B2", 1) - sentry_sdk.capture_message("Event B") - - sentry_sdk.set_tag("Z", 1) - sentry_sdk.capture_message("Event Z") - - (event_a, event_b, event_z) = events - - # Check against the results the same code returned in SDK 1.x - assert event_a["tags"] == {"A": 1} - assert event_b["tags"] == {"A": 1, "B1": 1, "B2": 1} - assert event_z["tags"] == {"A": 1, "B1": 1, "B2": 1, "Z": 1} - - -def test_with_hub_configure_scope_sdk1(sentry_init, capture_events): - """ - Mutate data in a `with Hub:` containing a `with configure_scope` block - - Checks the results of SDK 2.x against the results the same code returned in SDK 1.x. 
- """ - sentry_init() - - events = capture_events() - - sentry_sdk.set_tag("A", 1) - sentry_sdk.capture_message("Event A") - - with Hub.current as hub: # with hub - sentry_sdk.set_tag("B1", 1) - with hub.configure_scope() as scope: # configure scope - sentry_sdk.set_tag("B2", 1) - hub.scope.set_tag("B3", 1) - scope.set_tag("B4", 1) - sentry_sdk.capture_message("Event B") - sentry_sdk.set_tag("B5", 1) - sentry_sdk.capture_message("Event C") - - sentry_sdk.set_tag("Z", 1) - sentry_sdk.capture_message("Event Z") - - (event_a, event_b, event_c, event_z) = events - - # Check against the results the same code returned in SDK 1.x - assert event_a["tags"] == {"A": 1} - assert event_b["tags"] == {"A": 1, "B1": 1, "B2": 1, "B3": 1, "B4": 1} - assert event_c["tags"] == {"A": 1, "B1": 1, "B2": 1, "B3": 1, "B4": 1, "B5": 1} - assert event_z["tags"] == { - "A": 1, - "B1": 1, - "B2": 1, - "B3": 1, - "B4": 1, - "B5": 1, - "Z": 1, - } - - -def test_with_hub_push_scope_sdk1(sentry_init, capture_events): - """ - Mutate data in a `with Hub:` containing a `with push_scope` block - - Checks the results of SDK 2.x against the results the same code returned in SDK 1.x. - """ - sentry_init() - - events = capture_events() - - sentry_sdk.set_tag("A", 1) - sentry_sdk.capture_message("Event A") - - with Hub.current as hub: # with hub - sentry_sdk.set_tag("B1", 1) - with hub.push_scope() as scope: # push scope - sentry_sdk.set_tag("B2", 1) - hub.scope.set_tag("B3", 1) - scope.set_tag("B4", 1) - sentry_sdk.capture_message("Event B") - sentry_sdk.set_tag("B5", 1) - sentry_sdk.capture_message("Event C") - - sentry_sdk.set_tag("Z", 1) - sentry_sdk.capture_message("Event Z") - - (event_a, event_b, event_c, event_z) = events - - # Check against the results the same code returned in SDK 1.x - assert event_a["tags"] == {"A": 1} - assert event_b["tags"] == {"A": 1, "B1": 1, "B2": 1, "B3": 1, "B4": 1} - assert event_c["tags"] == {"A": 1, "B1": 1, "B5": 1} - assert event_z["tags"] == {"A": 1, "B1": 1, "B5": 1, "Z": 1} - - -def test_with_cloned_hub_sdk1(sentry_init, capture_events): - """ - Mutate data in a `with cloned Hub:` block - - Checks the results of SDK 2.x against the results the same code returned in SDK 1.x. - """ - sentry_init() - - events = capture_events() - - sentry_sdk.set_tag("A", 1) - sentry_sdk.capture_message("Event A") - - with Hub(Hub.current) as hub: # clone hub - sentry_sdk.set_tag("B1", 1) - hub.scope.set_tag("B2", 1) - sentry_sdk.capture_message("Event B") - - sentry_sdk.set_tag("Z", 1) - sentry_sdk.capture_message("Event Z") - - (event_a, event_b, event_z) = events - - # Check against the results the same code returned in SDK 1.x - assert event_a["tags"] == {"A": 1} - assert event_b["tags"] == {"A": 1, "B1": 1, "B2": 1} - assert event_z["tags"] == {"A": 1, "Z": 1} - - -def test_with_cloned_hub_configure_scope_sdk1(sentry_init, capture_events): - """ - Mutate data in a `with cloned Hub:` containing a `with configure_scope` block - - Checks the results of SDK 2.x against the results the same code returned in SDK 1.x. 
- """ - sentry_init() - - events = capture_events() - - sentry_sdk.set_tag("A", 1) - sentry_sdk.capture_message("Event A") - - with Hub(Hub.current) as hub: # clone hub - sentry_sdk.set_tag("B1", 1) - with hub.configure_scope() as scope: # configure scope - sentry_sdk.set_tag("B2", 1) - hub.scope.set_tag("B3", 1) - scope.set_tag("B4", 1) - sentry_sdk.capture_message("Event B") - sentry_sdk.set_tag("B5", 1) - sentry_sdk.capture_message("Event C") - - sentry_sdk.set_tag("Z", 1) - sentry_sdk.capture_message("Event Z") - - (event_a, event_b, event_c, event_z) = events - - # Check against the results the same code returned in SDK 1.x - assert event_a["tags"] == {"A": 1} - assert event_b["tags"] == {"A": 1, "B1": 1, "B2": 1, "B3": 1, "B4": 1} - assert event_c["tags"] == {"A": 1, "B1": 1, "B2": 1, "B3": 1, "B4": 1, "B5": 1} - assert event_z["tags"] == {"A": 1, "Z": 1} - - -def test_with_cloned_hub_push_scope_sdk1(sentry_init, capture_events): - """ - Mutate data in a `with cloned Hub:` containing a `with push_scope` block - - Checks the results of SDK 2.x against the results the same code returned in SDK 1.x. - """ - sentry_init() - - events = capture_events() - - sentry_sdk.set_tag("A", 1) - sentry_sdk.capture_message("Event A") - - with Hub(Hub.current) as hub: # clone hub - sentry_sdk.set_tag("B1", 1) - with hub.push_scope() as scope: # push scope - sentry_sdk.set_tag("B2", 1) - hub.scope.set_tag("B3", 1) - scope.set_tag("B4", 1) - sentry_sdk.capture_message("Event B") - sentry_sdk.set_tag("B5", 1) - sentry_sdk.capture_message("Event C") - - sentry_sdk.set_tag("Z", 1) - sentry_sdk.capture_message("Event Z") - - (event_a, event_b, event_c, event_z) = events - - # Check against the results the same code returned in SDK 1.x - assert event_a["tags"] == {"A": 1} - assert event_b["tags"] == {"A": 1, "B1": 1, "B2": 1, "B3": 1, "B4": 1} - assert event_c["tags"] == {"A": 1, "B1": 1, "B5": 1} - assert event_z["tags"] == {"A": 1, "Z": 1} diff --git a/tests/new_scopes_compat/test_new_scopes_compat_event.py b/tests/new_scopes_compat/test_new_scopes_compat_event.py deleted file mode 100644 index db1e5fec4b..0000000000 --- a/tests/new_scopes_compat/test_new_scopes_compat_event.py +++ /dev/null @@ -1,503 +0,0 @@ -import pytest - -from unittest import mock - -import sentry_sdk -from sentry_sdk.hub import Hub -from sentry_sdk.integrations import iter_default_integrations -from sentry_sdk.scrubber import EventScrubber, DEFAULT_DENYLIST - - -""" -Those tests are meant to check the compatibility of the new scopes in SDK 2.0 with the old Hub/Scope system in SDK 1.x. - -Those tests have been run with the latest SDK 1.x version and the data used in the `assert` statements represents -the behvaior of the SDK 1.x. - -This makes sure that we are backwards compatible. 
(on a best effort basis, there will probably be some edge cases that are not covered here) -""" - - -@pytest.fixture -def integrations(): - return [ - integration.identifier - for integration in iter_default_integrations( - with_auto_enabling_integrations=False - ) - ] - - -@pytest.fixture -def expected_error(integrations): - def create_expected_error_event(trx, span): - return { - "level": "warning-X", - "exception": { - "values": [ - { - "mechanism": {"type": "generic", "handled": True}, - "module": None, - "type": "ValueError", - "value": "This is a test exception", - "stacktrace": { - "frames": [ - { - "filename": "tests/new_scopes_compat/test_new_scopes_compat_event.py", - "abs_path": mock.ANY, - "function": "_faulty_function", - "module": "tests.new_scopes_compat.test_new_scopes_compat_event", - "lineno": mock.ANY, - "pre_context": [ - " return create_expected_transaction_event", - "", - "", - "def _faulty_function():", - " try:", - ], - "context_line": ' raise ValueError("This is a test exception")', - "post_context": [ - " except ValueError as ex:", - " sentry_sdk.capture_exception(ex)", - "", - "", - "def _test_before_send(event, hint):", - ], - "vars": { - "ex": mock.ANY, - }, - "in_app": True, - } - ] - }, - } - ] - }, - "event_id": mock.ANY, - "timestamp": mock.ANY, - "contexts": { - "character": { - "name": "Mighty Fighter changed by before_send", - "age": 19, - "attack_type": "melee", - }, - "trace": { - "trace_id": trx.trace_id, - "span_id": span.span_id, - "parent_span_id": span.parent_span_id, - "op": "test_span", - "origin": "manual", - "description": None, - "data": { - "thread.id": mock.ANY, - "thread.name": "MainThread", - }, - }, - "runtime": { - "name": "CPython", - "version": mock.ANY, - "build": mock.ANY, - }, - }, - "user": { - "id": "123", - "email": "jane.doe@example.com", - "ip_address": "[Filtered]", - }, - "transaction": "test_transaction", - "transaction_info": {"source": "custom"}, - "tags": {"tag1": "tag1_value", "tag2": "tag2_value"}, - "extra": { - "extra1": "extra1_value", - "extra2": "extra2_value", - "should_be_removed_by_event_scrubber": "[Filtered]", - "sys.argv": "[Filtered]", - }, - "breadcrumbs": { - "values": [ - { - "category": "error-level", - "message": "Authenticated user %s", - "level": "error", - "data": {"breadcrumb2": "somedata"}, - "timestamp": mock.ANY, - "type": "default", - } - ] - }, - "modules": mock.ANY, - "release": "0.1.2rc3", - "environment": "checking-compatibility-with-sdk1", - "server_name": mock.ANY, - "sdk": { - "name": "sentry.python", - "version": mock.ANY, - "packages": [{"name": "pypi:sentry-sdk", "version": mock.ANY}], - "integrations": integrations, - }, - "platform": "python", - "_meta": { - "user": {"ip_address": {"": {"rem": [["!config", "s"]]}}}, - "extra": { - "should_be_removed_by_event_scrubber": { - "": {"rem": [["!config", "s"]]} - }, - "sys.argv": {"": {"rem": [["!config", "s"]]}}, - }, - }, - } - - return create_expected_error_event - - -@pytest.fixture -def expected_transaction(integrations): - def create_expected_transaction_event(trx, span): - return { - "type": "transaction", - "transaction": "test_transaction changed by before_send_transaction", - "transaction_info": {"source": "custom"}, - "contexts": { - "trace": { - "trace_id": trx.trace_id, - "span_id": trx.span_id, - "parent_span_id": None, - "op": "test_transaction_op", - "origin": "manual", - "description": None, - "data": { - "thread.id": mock.ANY, - "thread.name": "MainThread", - }, - }, - "character": { - "name": "Mighty Fighter changed by 
before_send_transaction", - "age": 19, - "attack_type": "melee", - }, - "runtime": { - "name": "CPython", - "version": mock.ANY, - "build": mock.ANY, - }, - }, - "tags": {"tag1": "tag1_value", "tag2": "tag2_value"}, - "timestamp": mock.ANY, - "start_timestamp": mock.ANY, - "spans": [ - { - "data": { - "thread.id": mock.ANY, - "thread.name": "MainThread", - }, - "trace_id": trx.trace_id, - "span_id": span.span_id, - "parent_span_id": span.parent_span_id, - "same_process_as_parent": True, - "op": "test_span", - "origin": "manual", - "description": None, - "start_timestamp": mock.ANY, - "timestamp": mock.ANY, - } - ], - "measurements": {"memory_used": {"value": 456, "unit": "byte"}}, - "event_id": mock.ANY, - "level": "warning-X", - "user": { - "id": "123", - "email": "jane.doe@example.com", - "ip_address": "[Filtered]", - }, - "extra": { - "extra1": "extra1_value", - "extra2": "extra2_value", - "should_be_removed_by_event_scrubber": "[Filtered]", - "sys.argv": "[Filtered]", - }, - "release": "0.1.2rc3", - "environment": "checking-compatibility-with-sdk1", - "server_name": mock.ANY, - "sdk": { - "name": "sentry.python", - "version": mock.ANY, - "packages": [{"name": "pypi:sentry-sdk", "version": mock.ANY}], - "integrations": integrations, - }, - "platform": "python", - "_meta": { - "user": {"ip_address": {"": {"rem": [["!config", "s"]]}}}, - "extra": { - "should_be_removed_by_event_scrubber": { - "": {"rem": [["!config", "s"]]} - }, - "sys.argv": {"": {"rem": [["!config", "s"]]}}, - }, - }, - } - - return create_expected_transaction_event - - -def _faulty_function(): - try: - raise ValueError("This is a test exception") - except ValueError as ex: - sentry_sdk.capture_exception(ex) - - -def _test_before_send(event, hint): - event["contexts"]["character"]["name"] += " changed by before_send" - return event - - -def _test_before_send_transaction(event, hint): - event["transaction"] += " changed by before_send_transaction" - event["contexts"]["character"]["name"] += " changed by before_send_transaction" - return event - - -def _test_before_breadcrumb(breadcrumb, hint): - if breadcrumb["category"] == "info-level": - return None - return breadcrumb - - -def _generate_event_data(scope=None): - """ - Generates some data to be used in the events sent by the tests. 
- """ - sentry_sdk.set_level("warning-X") - - sentry_sdk.add_breadcrumb( - category="info-level", - message="Authenticated user %s", - level="info", - data={"breadcrumb1": "somedata"}, - ) - sentry_sdk.add_breadcrumb( - category="error-level", - message="Authenticated user %s", - level="error", - data={"breadcrumb2": "somedata"}, - ) - - sentry_sdk.set_context( - "character", - { - "name": "Mighty Fighter", - "age": 19, - "attack_type": "melee", - }, - ) - - sentry_sdk.set_extra("extra1", "extra1_value") - sentry_sdk.set_extra("extra2", "extra2_value") - sentry_sdk.set_extra("should_be_removed_by_event_scrubber", "XXX") - - sentry_sdk.set_tag("tag1", "tag1_value") - sentry_sdk.set_tag("tag2", "tag2_value") - - sentry_sdk.set_user( - {"id": "123", "email": "jane.doe@example.com", "ip_address": "211.161.1.124"} - ) - - sentry_sdk.set_measurement("memory_used", 456, "byte") - - if scope is not None: - scope.add_attachment(bytes=b"Hello World", filename="hello.txt") - - -def _init_sentry_sdk(sentry_init): - sentry_init( - environment="checking-compatibility-with-sdk1", - release="0.1.2rc3", - before_send=_test_before_send, - before_send_transaction=_test_before_send_transaction, - before_breadcrumb=_test_before_breadcrumb, - event_scrubber=EventScrubber( - denylist=DEFAULT_DENYLIST - + ["should_be_removed_by_event_scrubber", "sys.argv"] - ), - send_default_pii=False, - traces_sample_rate=1.0, - auto_enabling_integrations=False, - ) - - -# -# The actual Tests start here! -# - - -def test_event(sentry_init, capture_envelopes, expected_error, expected_transaction): - _init_sentry_sdk(sentry_init) - - envelopes = capture_envelopes() - - with sentry_sdk.start_transaction( - name="test_transaction", op="test_transaction_op" - ) as trx: - with sentry_sdk.start_span(op="test_span") as span: - with sentry_sdk.configure_scope() as scope: # configure scope - _generate_event_data(scope) - _faulty_function() - - (error_envelope, transaction_envelope) = envelopes - - error = error_envelope.get_event() - transaction = transaction_envelope.get_transaction_event() - attachment = error_envelope.items[-1] - - assert error == expected_error(trx, span) - assert transaction == expected_transaction(trx, span) - assert attachment.headers == { - "filename": "hello.txt", - "type": "attachment", - "content_type": "text/plain", - } - assert attachment.payload.bytes == b"Hello World" - - -def test_event2(sentry_init, capture_envelopes, expected_error, expected_transaction): - _init_sentry_sdk(sentry_init) - - envelopes = capture_envelopes() - - with Hub(Hub.current): - sentry_sdk.set_tag("A", 1) # will not be added - - with Hub.current: # with hub - with sentry_sdk.push_scope() as scope: - scope.set_tag("B", 1) # will not be added - - with sentry_sdk.start_transaction( - name="test_transaction", op="test_transaction_op" - ) as trx: - with sentry_sdk.start_span(op="test_span") as span: - with sentry_sdk.configure_scope() as scope: # configure scope - _generate_event_data(scope) - _faulty_function() - - (error_envelope, transaction_envelope) = envelopes - - error = error_envelope.get_event() - transaction = transaction_envelope.get_transaction_event() - attachment = error_envelope.items[-1] - - assert error == expected_error(trx, span) - assert transaction == expected_transaction(trx, span) - assert attachment.headers == { - "filename": "hello.txt", - "type": "attachment", - "content_type": "text/plain", - } - assert attachment.payload.bytes == b"Hello World" - - -def test_event3(sentry_init, capture_envelopes, 
expected_error, expected_transaction): - _init_sentry_sdk(sentry_init) - - envelopes = capture_envelopes() - - with Hub(Hub.current): - sentry_sdk.set_tag("A", 1) # will not be added - - with Hub.current: # with hub - with sentry_sdk.push_scope() as scope: - scope.set_tag("B", 1) # will not be added - - with sentry_sdk.push_scope() as scope: # push scope - with sentry_sdk.start_transaction( - name="test_transaction", op="test_transaction_op" - ) as trx: - with sentry_sdk.start_span(op="test_span") as span: - _generate_event_data(scope) - _faulty_function() - - (error_envelope, transaction_envelope) = envelopes - - error = error_envelope.get_event() - transaction = transaction_envelope.get_transaction_event() - attachment = error_envelope.items[-1] - - assert error == expected_error(trx, span) - assert transaction == expected_transaction(trx, span) - assert attachment.headers == { - "filename": "hello.txt", - "type": "attachment", - "content_type": "text/plain", - } - assert attachment.payload.bytes == b"Hello World" - - -def test_event4(sentry_init, capture_envelopes, expected_error, expected_transaction): - _init_sentry_sdk(sentry_init) - - envelopes = capture_envelopes() - - with Hub(Hub.current): - sentry_sdk.set_tag("A", 1) # will not be added - - with Hub(Hub.current): # with hub clone - with sentry_sdk.push_scope() as scope: - scope.set_tag("B", 1) # will not be added - - with sentry_sdk.start_transaction( - name="test_transaction", op="test_transaction_op" - ) as trx: - with sentry_sdk.start_span(op="test_span") as span: - with sentry_sdk.configure_scope() as scope: # configure scope - _generate_event_data(scope) - _faulty_function() - - (error_envelope, transaction_envelope) = envelopes - - error = error_envelope.get_event() - transaction = transaction_envelope.get_transaction_event() - attachment = error_envelope.items[-1] - - assert error == expected_error(trx, span) - assert transaction == expected_transaction(trx, span) - assert attachment.headers == { - "filename": "hello.txt", - "type": "attachment", - "content_type": "text/plain", - } - assert attachment.payload.bytes == b"Hello World" - - -def test_event5(sentry_init, capture_envelopes, expected_error, expected_transaction): - _init_sentry_sdk(sentry_init) - - envelopes = capture_envelopes() - - with Hub(Hub.current): - sentry_sdk.set_tag("A", 1) # will not be added - - with Hub(Hub.current): # with hub clone - with sentry_sdk.push_scope() as scope: - scope.set_tag("B", 1) # will not be added - - with sentry_sdk.push_scope() as scope: # push scope - with sentry_sdk.start_transaction( - name="test_transaction", op="test_transaction_op" - ) as trx: - with sentry_sdk.start_span(op="test_span") as span: - _generate_event_data(scope) - _faulty_function() - - (error_envelope, transaction_envelope) = envelopes - - error = error_envelope.get_event() - transaction = transaction_envelope.get_transaction_event() - attachment = error_envelope.items[-1] - - assert error == expected_error(trx, span) - assert transaction == expected_transaction(trx, span) - assert attachment.headers == { - "filename": "hello.txt", - "type": "attachment", - "content_type": "text/plain", - } - assert attachment.payload.bytes == b"Hello World" diff --git a/tests/integrations/opentelemetry/__init__.py b/tests/opentelemetry/__init__.py similarity index 100% rename from tests/integrations/opentelemetry/__init__.py rename to tests/opentelemetry/__init__.py diff --git a/tests/opentelemetry/test_compat.py b/tests/opentelemetry/test_compat.py new file mode 100644 
index 0000000000..381d9ad22e --- /dev/null +++ b/tests/opentelemetry/test_compat.py @@ -0,0 +1,99 @@ +import sentry_sdk +from sentry_sdk.tracing import Transaction + + +def test_transaction_name_span_description_compat( + sentry_init, + capture_events, +): + sentry_init(traces_sample_rate=1.0) + + events = capture_events() + + with sentry_sdk.start_span( + name="trx-name", + op="trx-op", + ) as trx: + with sentry_sdk.start_span( + description="span-desc", + op="span-op", + ) as spn: + ... + + assert trx.__class__.__name__ == "Span" + assert trx.op == "trx-op" + assert trx.name == "trx-name" + assert trx.description is None + + assert trx._otel_span is not None + assert trx._otel_span.name == "trx-name" + assert trx._otel_span.attributes["sentry.op"] == "trx-op" + assert trx._otel_span.attributes["sentry.name"] == "trx-name" + assert "sentry.description" not in trx._otel_span.attributes + + assert spn.__class__.__name__ == "Span" + assert spn.op == "span-op" + assert spn.description == "span-desc" + assert spn.name == "span-desc" + + assert spn._otel_span is not None + assert spn._otel_span.name == "span-desc" + assert spn._otel_span.attributes["sentry.op"] == "span-op" + assert spn._otel_span.attributes["sentry.description"] == "span-desc" + + transaction = events[0] + assert transaction["transaction"] == "trx-name" + assert transaction["contexts"]["trace"]["op"] == "trx-op" + assert transaction["contexts"]["trace"]["data"]["sentry.op"] == "trx-op" + assert transaction["contexts"]["trace"]["data"]["sentry.name"] == "trx-name" + assert "sentry.description" not in transaction["contexts"]["trace"]["data"] + + span = transaction["spans"][0] + assert span["description"] == "span-desc" + assert span["op"] == "span-op" + assert span["data"]["sentry.op"] == "span-op" + assert span["data"]["sentry.description"] == "span-desc" + + +def test_start_transaction_compat( + sentry_init, + capture_events, +): + sentry_init(traces_sample_rate=1.0) + + events = capture_events() + + with sentry_sdk.start_transaction( + name="trx-name", + op="trx-op", + ): + ... 
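# --- Editor's sketch, not part of this patch: a plausible shape for the
# start_transaction() compat shim exercised above, assuming it simply
# forwards to start_span(). The helper name and internals are assumptions,
# not the SDK's actual implementation.
def _start_transaction_sketch(transaction=None, **kwargs):
    # In the OTel-powered SDK a transaction is just a root span, so the
    # legacy entry point can delegate to start_span(); a ready-made
    # Transaction object (see the explicit-transaction test below)
    # contributes its name and op.
    if transaction is not None:
        kwargs.setdefault("name", transaction.name)
        kwargs.setdefault("op", transaction.op)
    return sentry_sdk.start_span(**kwargs)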
+ + transaction = events[0] + assert transaction["transaction"] == "trx-name" + assert transaction["contexts"]["trace"]["op"] == "trx-op" + assert transaction["contexts"]["trace"]["data"]["sentry.op"] == "trx-op" + assert transaction["contexts"]["trace"]["data"]["sentry.name"] == "trx-name" + assert "sentry.description" not in transaction["contexts"]["trace"]["data"] + + +def test_start_transaction_with_explicit_transaction_compat( + sentry_init, + capture_events, +): + """It should still be possible to provide a ready-made Transaction to start_transaction.""" + sentry_init(traces_sample_rate=1.0) + + events = capture_events() + + transaction = Transaction(name="trx-name", op="trx-op") + + with sentry_sdk.start_transaction(transaction=transaction): + pass + + transaction = events[0] + assert transaction["transaction"] == "trx-name" + assert transaction["contexts"]["trace"]["op"] == "trx-op" + assert transaction["contexts"]["trace"]["data"]["sentry.op"] == "trx-op" + assert transaction["contexts"]["trace"]["data"]["sentry.name"] == "trx-name" + assert "sentry.description" not in transaction["contexts"]["trace"]["data"] diff --git a/tests/integrations/opentelemetry/test_entry_points.py b/tests/opentelemetry/test_entry_points.py similarity index 87% rename from tests/integrations/opentelemetry/test_entry_points.py rename to tests/opentelemetry/test_entry_points.py index cd78209432..efadb67a06 100644 --- a/tests/integrations/opentelemetry/test_entry_points.py +++ b/tests/opentelemetry/test_entry_points.py @@ -3,7 +3,7 @@ from unittest.mock import patch from opentelemetry import propagate -from sentry_sdk.integrations.opentelemetry import SentryPropagator +from sentry_sdk.opentelemetry import SentryPropagator def test_propagator_loaded_if_mentioned_in_environment_variable(): diff --git a/tests/opentelemetry/test_potel.py b/tests/opentelemetry/test_potel.py new file mode 100644 index 0000000000..753f2b4cf2 --- /dev/null +++ b/tests/opentelemetry/test_potel.py @@ -0,0 +1,366 @@ +import pytest +from opentelemetry import trace + +import sentry_sdk +from sentry_sdk.consts import SPANSTATUS +from tests.conftest import ApproxDict + + +tracer = trace.get_tracer(__name__) + + +@pytest.fixture(autouse=True) +def sentry_init_potel(sentry_init): + sentry_init( + traces_sample_rate=1.0, + _experiments={"otel_powered_performance": True}, + ) + + +def test_root_span_transaction_payload_started_with_otel_only(capture_envelopes): + envelopes = capture_envelopes() + + with tracer.start_as_current_span("request"): + pass + + (envelope,) = envelopes + # TODO-neel-potel DSC header + (item,) = envelope.items + payload = item.payload.json + + assert payload["type"] == "transaction" + assert payload["transaction"] == "request" + assert payload["transaction_info"] == {"source": "custom"} + assert payload["timestamp"] is not None + assert payload["start_timestamp"] is not None + + contexts = payload["contexts"] + assert "runtime" in contexts + assert "otel" in contexts + assert "resource" in contexts["otel"] + + trace_context = contexts["trace"] + assert "trace_id" in trace_context + assert "span_id" in trace_context + assert trace_context["origin"] == "manual" + assert trace_context["op"] == "request" + + assert payload["spans"] == [] + + +def test_child_span_payload_started_with_otel_only(capture_envelopes): + envelopes = capture_envelopes() + + with tracer.start_as_current_span("request"): + with tracer.start_as_current_span("db"): + pass + + (envelope,) = envelopes + (item,) = envelope.items + payload = 
item.payload.json + (span,) = payload["spans"] + + assert span["op"] == "db" + assert span["description"] == "db" + assert span["origin"] == "manual" + assert span["span_id"] is not None + assert span["trace_id"] == payload["contexts"]["trace"]["trace_id"] + assert span["parent_span_id"] == payload["contexts"]["trace"]["span_id"] + assert span["timestamp"] is not None + assert span["start_timestamp"] is not None + + +def test_children_span_nesting_started_with_otel_only(capture_envelopes): + envelopes = capture_envelopes() + + with tracer.start_as_current_span("request"): + with tracer.start_as_current_span("db"): + with tracer.start_as_current_span("redis"): + pass + with tracer.start_as_current_span("http"): + pass + + (envelope,) = envelopes + (item,) = envelope.items + payload = item.payload.json + (db_span, http_span, redis_span) = payload["spans"] + + assert db_span["op"] == "db" + assert redis_span["op"] == "redis" + assert http_span["op"] == "http" + + assert db_span["trace_id"] == payload["contexts"]["trace"]["trace_id"] + assert redis_span["trace_id"] == payload["contexts"]["trace"]["trace_id"] + assert http_span["trace_id"] == payload["contexts"]["trace"]["trace_id"] + + assert db_span["parent_span_id"] == payload["contexts"]["trace"]["span_id"] + assert http_span["parent_span_id"] == payload["contexts"]["trace"]["span_id"] + assert redis_span["parent_span_id"] == db_span["span_id"] + + +def test_root_span_transaction_payload_started_with_sentry_only(capture_envelopes): + envelopes = capture_envelopes() + + with sentry_sdk.start_span(description="request"): + pass + + (envelope,) = envelopes + # TODO-neel-potel DSC header + (item,) = envelope.items + payload = item.payload.json + + assert payload["type"] == "transaction" + assert payload["transaction"] == "request" + assert payload["transaction_info"] == {"source": "custom"} + assert payload["timestamp"] is not None + assert payload["start_timestamp"] is not None + + contexts = payload["contexts"] + assert "runtime" in contexts + assert "otel" in contexts + assert "resource" in contexts["otel"] + + trace_context = contexts["trace"] + assert "trace_id" in trace_context + assert "span_id" in trace_context + assert trace_context["origin"] == "manual" + assert trace_context["op"] == "request" + assert trace_context["status"] == "ok" + + assert payload["spans"] == [] + + +def test_child_span_payload_started_with_sentry_only(capture_envelopes): + envelopes = capture_envelopes() + + with sentry_sdk.start_span(description="request"): + with sentry_sdk.start_span(description="db"): + pass + + (envelope,) = envelopes + (item,) = envelope.items + payload = item.payload.json + (span,) = payload["spans"] + + assert span["op"] == "db" + assert span["description"] == "db" + assert span["origin"] == "manual" + assert span["status"] == "ok" + assert span["span_id"] is not None + assert span["trace_id"] == payload["contexts"]["trace"]["trace_id"] + assert span["parent_span_id"] == payload["contexts"]["trace"]["span_id"] + assert span["timestamp"] is not None + assert span["start_timestamp"] is not None + + +def test_children_span_nesting_started_with_sentry_only(capture_envelopes): + envelopes = capture_envelopes() + + with sentry_sdk.start_span(description="request"): + with sentry_sdk.start_span(description="db"): + with sentry_sdk.start_span(description="redis"): + pass + with sentry_sdk.start_span(description="http"): + pass + + (envelope,) = envelopes + (item,) = envelope.items + payload = item.payload.json + (db_span, http_span, redis_span) 
= payload["spans"] + + assert db_span["op"] == "db" + assert redis_span["op"] == "redis" + assert http_span["op"] == "http" + + assert db_span["trace_id"] == payload["contexts"]["trace"]["trace_id"] + assert redis_span["trace_id"] == payload["contexts"]["trace"]["trace_id"] + assert http_span["trace_id"] == payload["contexts"]["trace"]["trace_id"] + + assert db_span["parent_span_id"] == payload["contexts"]["trace"]["span_id"] + assert http_span["parent_span_id"] == payload["contexts"]["trace"]["span_id"] + assert redis_span["parent_span_id"] == db_span["span_id"] + + +def test_children_span_nesting_mixed(capture_envelopes): + envelopes = capture_envelopes() + + with sentry_sdk.start_span(description="request"): + with tracer.start_as_current_span("db"): + with sentry_sdk.start_span(description="redis"): + pass + with tracer.start_as_current_span("http"): + pass + + (envelope,) = envelopes + (item,) = envelope.items + payload = item.payload.json + (db_span, http_span, redis_span) = payload["spans"] + + assert db_span["op"] == "db" + assert redis_span["op"] == "redis" + assert http_span["op"] == "http" + + assert db_span["trace_id"] == payload["contexts"]["trace"]["trace_id"] + assert redis_span["trace_id"] == payload["contexts"]["trace"]["trace_id"] + assert http_span["trace_id"] == payload["contexts"]["trace"]["trace_id"] + + assert db_span["parent_span_id"] == payload["contexts"]["trace"]["span_id"] + assert http_span["parent_span_id"] == payload["contexts"]["trace"]["span_id"] + assert redis_span["parent_span_id"] == db_span["span_id"] + + +def test_span_attributes_in_data_started_with_otel(capture_envelopes): + envelopes = capture_envelopes() + + with tracer.start_as_current_span("request") as request_span: + request_span.set_attributes({"foo": "bar", "baz": 42}) + with tracer.start_as_current_span("db") as db_span: + db_span.set_attributes({"abc": 99, "def": "moo"}) + + (envelope,) = envelopes + (item,) = envelope.items + payload = item.payload.json + + assert payload["contexts"]["trace"]["data"] == ApproxDict({"foo": "bar", "baz": 42}) + assert payload["spans"][0]["data"] == ApproxDict({"abc": 99, "def": "moo"}) + + +def test_span_data_started_with_sentry(capture_envelopes): + envelopes = capture_envelopes() + + with sentry_sdk.start_span(op="http", description="request") as request_span: + request_span.set_attribute("foo", "bar") + with sentry_sdk.start_span(op="db", description="statement") as db_span: + db_span.set_attribute("baz", 42) + + (envelope,) = envelopes + (item,) = envelope.items + payload = item.payload.json + + assert payload["contexts"]["trace"]["data"] == ApproxDict( + { + "foo": "bar", + "sentry.origin": "manual", + "sentry.description": "request", + "sentry.op": "http", + } + ) + assert payload["spans"][0]["data"] == ApproxDict( + { + "baz": 42, + "sentry.origin": "manual", + "sentry.description": "statement", + "sentry.op": "db", + } + ) + + +def test_transaction_tags_started_with_otel(capture_envelopes): + envelopes = capture_envelopes() + + sentry_sdk.set_tag("tag.global", 99) + with tracer.start_as_current_span("request"): + sentry_sdk.set_tag("tag.inner", "foo") + + (envelope,) = envelopes + (item,) = envelope.items + payload = item.payload.json + + assert payload["tags"] == {"tag.global": 99, "tag.inner": "foo"} + + +def test_transaction_tags_started_with_sentry(capture_envelopes): + envelopes = capture_envelopes() + + sentry_sdk.set_tag("tag.global", 99) + with sentry_sdk.start_span(description="request"): + sentry_sdk.set_tag("tag.inner", "foo") + + 
(envelope,) = envelopes + (item,) = envelope.items + payload = item.payload.json + + assert payload["tags"] == {"tag.global": 99, "tag.inner": "foo"} + + +def test_multiple_transaction_tags_isolation_scope_started_with_otel(capture_envelopes): + envelopes = capture_envelopes() + + sentry_sdk.set_tag("tag.global", 99) + with sentry_sdk.isolation_scope(): + with tracer.start_as_current_span("request a"): + sentry_sdk.set_tag("tag.inner.a", "a") + with sentry_sdk.isolation_scope(): + with tracer.start_as_current_span("request b"): + sentry_sdk.set_tag("tag.inner.b", "b") + + (payload_a, payload_b) = [envelope.items[0].payload.json for envelope in envelopes] + + assert payload_a["tags"] == {"tag.global": 99, "tag.inner.a": "a"} + assert payload_b["tags"] == {"tag.global": 99, "tag.inner.b": "b"} + + +def test_multiple_transaction_tags_isolation_scope_started_with_sentry( + capture_envelopes, +): + envelopes = capture_envelopes() + + sentry_sdk.set_tag("tag.global", 99) + with sentry_sdk.isolation_scope(): + with sentry_sdk.start_span(description="request a"): + sentry_sdk.set_tag("tag.inner.a", "a") + with sentry_sdk.isolation_scope(): + with sentry_sdk.start_span(description="request b"): + sentry_sdk.set_tag("tag.inner.b", "b") + + (payload_a, payload_b) = [envelope.items[0].payload.json for envelope in envelopes] + + assert payload_a["tags"] == {"tag.global": 99, "tag.inner.a": "a"} + assert payload_b["tags"] == {"tag.global": 99, "tag.inner.b": "b"} + + +def test_potel_span_root_span_references(): + with sentry_sdk.start_span(description="request") as request_span: + assert request_span.is_root_span + assert request_span.root_span == request_span + with sentry_sdk.start_span(description="db") as db_span: + assert not db_span.is_root_span + assert db_span.root_span == request_span + with sentry_sdk.start_span(description="redis") as redis_span: + assert not redis_span.is_root_span + assert redis_span.root_span == request_span + with sentry_sdk.start_span(description="http") as http_span: + assert not http_span.is_root_span + assert http_span.root_span == request_span + + +@pytest.mark.parametrize( + "status_in,status_out", + [ + (None, None), + ("", SPANSTATUS.UNKNOWN_ERROR), + (SPANSTATUS.OK, SPANSTATUS.OK), + (SPANSTATUS.ABORTED, SPANSTATUS.UNKNOWN_ERROR), + (SPANSTATUS.ALREADY_EXISTS, SPANSTATUS.UNKNOWN_ERROR), + (SPANSTATUS.CANCELLED, SPANSTATUS.UNKNOWN_ERROR), + (SPANSTATUS.DATA_LOSS, SPANSTATUS.UNKNOWN_ERROR), + (SPANSTATUS.DEADLINE_EXCEEDED, SPANSTATUS.UNKNOWN_ERROR), + (SPANSTATUS.FAILED_PRECONDITION, SPANSTATUS.UNKNOWN_ERROR), + (SPANSTATUS.INTERNAL_ERROR, SPANSTATUS.UNKNOWN_ERROR), + (SPANSTATUS.INVALID_ARGUMENT, SPANSTATUS.UNKNOWN_ERROR), + (SPANSTATUS.NOT_FOUND, SPANSTATUS.UNKNOWN_ERROR), + (SPANSTATUS.OUT_OF_RANGE, SPANSTATUS.UNKNOWN_ERROR), + (SPANSTATUS.PERMISSION_DENIED, SPANSTATUS.UNKNOWN_ERROR), + (SPANSTATUS.RESOURCE_EXHAUSTED, SPANSTATUS.UNKNOWN_ERROR), + (SPANSTATUS.UNAUTHENTICATED, SPANSTATUS.UNKNOWN_ERROR), + (SPANSTATUS.UNAVAILABLE, SPANSTATUS.UNKNOWN_ERROR), + (SPANSTATUS.UNIMPLEMENTED, SPANSTATUS.UNKNOWN_ERROR), + (SPANSTATUS.UNKNOWN_ERROR, SPANSTATUS.UNKNOWN_ERROR), + ], +) +def test_potel_span_status(status_in, status_out): + span = sentry_sdk.start_span(name="test") + if status_in is not None: + span.set_status(status_in) + + assert span.status == status_out diff --git a/tests/opentelemetry/test_propagator.py b/tests/opentelemetry/test_propagator.py new file mode 100644 index 0000000000..49437fa896 --- /dev/null +++ b/tests/opentelemetry/test_propagator.py @@ 
-0,0 +1,210 @@ +from unittest.mock import MagicMock, patch + +import pytest + +from opentelemetry.trace.propagation import get_current_span +from opentelemetry.propagators.textmap import DefaultSetter + +import sentry_sdk +from sentry_sdk.opentelemetry.consts import ( + SENTRY_BAGGAGE_KEY, + SENTRY_TRACE_KEY, +) +from sentry_sdk.opentelemetry import SentryPropagator +from tests.conftest import SortedBaggage + + +@pytest.mark.forked +def test_extract_no_context_no_sentry_trace_header(): + """ + No context and NO Sentry trace data in getter. + Extract should return an empty context. + """ + carrier = None + context = {} + getter = MagicMock() + getter.get.return_value = None + + modified_context = SentryPropagator().extract(carrier, context, getter) + + assert modified_context == {} + + +@pytest.mark.forked +def test_extract_context_no_sentry_trace_header(): + """ + Context but NO Sentry trace data in getter. + Extract should return the context as is. + """ + carrier = None + context = {"some": "value"} + getter = MagicMock() + getter.get.return_value = None + + modified_context = SentryPropagator().extract(carrier, context, getter) + + assert modified_context == context + + +@pytest.mark.forked +def test_extract_empty_context_sentry_trace_header_no_baggage(): + """ + Empty context, with Sentry trace data but NO baggage in getter. + Extract should return context that has empty baggage in it and also a NoopSpan with span_id and trace_id. + """ + carrier = None + context = {} + getter = MagicMock() + getter.get.side_effect = [ + ["1234567890abcdef1234567890abcdef-1234567890abcdef-1"], + None, + ] + + modified_context = SentryPropagator().extract(carrier, context, getter) + + assert len(modified_context.keys()) == 3 + + assert modified_context[SENTRY_TRACE_KEY] == { + "trace_id": "1234567890abcdef1234567890abcdef", + "parent_span_id": "1234567890abcdef", + "parent_sampled": True, + } + assert modified_context[SENTRY_BAGGAGE_KEY].serialize() == "" + + span_context = get_current_span(modified_context).get_span_context() + assert span_context.span_id == int("1234567890abcdef", 16) + assert span_context.trace_id == int("1234567890abcdef1234567890abcdef", 16) + + +@pytest.mark.forked +def test_extract_context_sentry_trace_header_baggage(): + """ + Context with existing data, plus Sentry trace data and baggage in getter. + Extract should return context that has baggage in it and also a NoopSpan with span_id and trace_id.
+ """ + baggage_header = ( + "other-vendor-value-1=foo;bar;baz, sentry-trace_id=771a43a4192642f0b136d5159a501700, " + "sentry-public_key=49d0f7386ad645858ae85020e393bef3, sentry-sample_rate=0.01337, " + "sentry-user_id=Am%C3%A9lie, other-vendor-value-2=foo;bar;" + ) + + carrier = None + context = {"some": "value"} + getter = MagicMock() + getter.get.side_effect = [ + ["1234567890abcdef1234567890abcdef-1234567890abcdef-1"], + [baggage_header], + ] + + modified_context = SentryPropagator().extract(carrier, context, getter) + + assert len(modified_context.keys()) == 4 + + assert modified_context[SENTRY_TRACE_KEY] == { + "trace_id": "1234567890abcdef1234567890abcdef", + "parent_span_id": "1234567890abcdef", + "parent_sampled": True, + } + + assert modified_context[SENTRY_BAGGAGE_KEY].serialize() == ( + "sentry-trace_id=771a43a4192642f0b136d5159a501700," + "sentry-public_key=49d0f7386ad645858ae85020e393bef3," + "sentry-sample_rate=0.01337,sentry-user_id=Am%C3%A9lie" + ) + + span_context = get_current_span(modified_context).get_span_context() + assert span_context.span_id == int("1234567890abcdef", 16) + assert span_context.trace_id == int("1234567890abcdef1234567890abcdef", 16) + + +def test_inject_continue_trace(sentry_init): + sentry_init(traces_sample_rate=1.0) + + carrier = {} + setter = DefaultSetter() + + trace_id = "771a43a4192642f0b136d5159a501700" + sentry_trace = "771a43a4192642f0b136d5159a501700-1234567890abcdef-1" + baggage = ( + "sentry-trace_id=771a43a4192642f0b136d5159a501700," + "sentry-public_key=frontendpublickey," + "sentry-sample_rate=0.01337," + "sentry-sampled=true," + "sentry-release=myfrontend," + "sentry-environment=bird," + "sentry-transaction=bar" + ) + incoming_headers = { + "HTTP_SENTRY_TRACE": sentry_trace, + "HTTP_BAGGAGE": baggage, + } + + expected_baggage = baggage + ",sentry-sample_rand=0.001111" + + with patch( + "sentry_sdk.tracing_utils.Random.uniform", + return_value=0.001111, + ): + with sentry_sdk.continue_trace(incoming_headers): + with sentry_sdk.start_span(name="foo") as span: + SentryPropagator().inject(carrier, setter=setter) + assert carrier["sentry-trace"] == f"{trace_id}-{span.span_id}-1" + assert carrier["baggage"] == SortedBaggage(expected_baggage) + + +def test_inject_continue_trace_incoming_sample_rand(sentry_init): + sentry_init(traces_sample_rate=1.0) + + carrier = {} + setter = DefaultSetter() + + trace_id = "771a43a4192642f0b136d5159a501700" + sentry_trace = "771a43a4192642f0b136d5159a501700-1234567890abcdef-1" + baggage = ( + "sentry-trace_id=771a43a4192642f0b136d5159a501700," + "sentry-public_key=frontendpublickey," + "sentry-sample_rate=0.01337," + "sentry-sampled=true," + "sentry-release=myfrontend," + "sentry-environment=bird," + "sentry-transaction=bar," + "sentry-sample_rand=0.002849" + ) + incoming_headers = { + "HTTP_SENTRY_TRACE": sentry_trace, + "HTTP_BAGGAGE": baggage, + } + + with sentry_sdk.continue_trace(incoming_headers): + with sentry_sdk.start_span(name="foo") as span: + SentryPropagator().inject(carrier, setter=setter) + assert carrier["sentry-trace"] == f"{trace_id}-{span.span_id}-1" + assert carrier["baggage"] == SortedBaggage(baggage) + + +def test_inject_head_sdk(sentry_init): + sentry_init(traces_sample_rate=1.0, release="release") + + carrier = {} + setter = DefaultSetter() + + expected_baggage = ( + "sentry-transaction=foo," + "sentry-release=release," + "sentry-environment=production," + "sentry-trace_id={trace_id}," + "sentry-sample_rate=1.0," + "sentry-sampled=true," + "sentry-sample_rand=0.111111" + ) + + with 
patch( + "sentry_sdk.tracing_utils.Random.uniform", + return_value=0.111111, + ): + with sentry_sdk.start_span(name="foo") as span: + SentryPropagator().inject(carrier, setter=setter) + assert carrier["sentry-trace"] == f"{span.trace_id}-{span.span_id}-1" + assert carrier["baggage"] == SortedBaggage( + expected_baggage.format(trace_id=span.trace_id) + ) diff --git a/tests/opentelemetry/test_sampler.py b/tests/opentelemetry/test_sampler.py new file mode 100644 index 0000000000..4ca1e1963f --- /dev/null +++ b/tests/opentelemetry/test_sampler.py @@ -0,0 +1,334 @@ +import pytest +from unittest import mock + +from opentelemetry import trace + +import sentry_sdk + + +USE_DEFAULT_TRACES_SAMPLE_RATE = -1 + +tracer = trace.get_tracer(__name__) + + +@pytest.mark.parametrize( + "traces_sample_rate, expected_num_of_envelopes", + [ + # special case for testing, do not pass any traces_sample_rate to init() (the default traces_sample_rate=None will be used) + (USE_DEFAULT_TRACES_SAMPLE_RATE, 0), + # traces_sample_rate=None means do not create new traces, and also do not continue incoming traces. So, no envelopes at all. + (None, 0), + # traces_sample_rate=0 means do not create new traces (0% of the requests), but continue incoming traces. So envelopes will be created only if there is an incoming trace. + (0, 0), + # traces_sample_rate=1 means create new traces for 100% of requests (and also continue incoming traces, of course). + (1, 2), + ], +) +def test_sampling_traces_sample_rate_0_or_100( + sentry_init, + capture_envelopes, + traces_sample_rate, + expected_num_of_envelopes, +): + kwargs = {} + if traces_sample_rate != USE_DEFAULT_TRACES_SAMPLE_RATE: + kwargs["traces_sample_rate"] = traces_sample_rate + + sentry_init(**kwargs) + + envelopes = capture_envelopes() + + with sentry_sdk.start_span(description="request a"): + with sentry_sdk.start_span(description="cache a"): + with sentry_sdk.start_span(description="db a"): + ... + + with sentry_sdk.start_span(description="request b"): + with sentry_sdk.start_span(description="cache b"): + with sentry_sdk.start_span(description="db b"): + ... + + assert len(envelopes) == expected_num_of_envelopes + + if expected_num_of_envelopes == 2: + (transaction_a, transaction_b) = [ + envelope.items[0].payload.json for envelope in envelopes + ] + + assert transaction_a["transaction"] == "request a" + assert transaction_b["transaction"] == "request b" + + spans_a = transaction_a["spans"] + assert len(spans_a) == 2 + assert spans_a[0]["description"] == "cache a" + assert spans_a[1]["description"] == "db a" + spans_b = transaction_b["spans"] + assert len(spans_b) == 2 + assert spans_b[0]["description"] == "cache b" + assert spans_b[1]["description"] == "db b" + + +def test_sampling_traces_sample_rate_50(sentry_init, capture_envelopes): + sentry_init(traces_sample_rate=0.5) + + envelopes = capture_envelopes() + + with mock.patch( + "sentry_sdk.tracing_utils.Random.uniform", return_value=0.2 + ): # keep (0.2 < 0.5) + with sentry_sdk.start_span(description="request a"): + with sentry_sdk.start_span(description="cache a"): + with sentry_sdk.start_span(description="db a"): + ... + + with mock.patch( + "sentry_sdk.tracing_utils.Random.uniform", return_value=0.7 + ): # drop (0.7 >= 0.5) + with sentry_sdk.start_span(description="request b"): + with sentry_sdk.start_span(description="cache b"): + with sentry_sdk.start_span(description="db b"): + ...
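# --- Editor's sketch, not part of this patch: the decision the two mocked
# Random.uniform values above drive, assuming the SDK keeps a trace when its
# sample_rand is strictly below traces_sample_rate. The helper name is
# hypothetical, not the SDK's internal API.
def _is_sampled_sketch(sample_rand, sample_rate):
    # 0.2 < 0.5 keeps "request a"; 0.7 >= 0.5 drops "request b",
    # matching the assertions that follow.
    return sample_rand < sample_rate

assert _is_sampled_sketch(0.2, 0.5) is True
assert _is_sampled_sketch(0.7, 0.5) is False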
+ + assert len(envelopes) == 1 + + (envelope,) = envelopes + transaction = envelope.items[0].payload.json + assert transaction["transaction"] == "request a" + spans = transaction["spans"] + assert len(spans) == 2 + assert spans[0]["description"] == "cache a" + assert spans[1]["description"] == "db a" + + +def test_sampling_traces_sampler(sentry_init, capture_envelopes): + def keep_only_a(sampling_context): + if " a" in sampling_context["transaction_context"]["name"]: + return 1 + else: + return 0 + + sentry_init(traces_sampler=keep_only_a) + + envelopes = capture_envelopes() + + # children inherit from root spans + with sentry_sdk.start_span(description="request a"): # keep + with sentry_sdk.start_span(description="cache a"): + with sentry_sdk.start_span(description="db a"): + ... + + with sentry_sdk.start_span(description="request b"): # drop + with sentry_sdk.start_span(description="cache b"): + with sentry_sdk.start_span(description="db b"): + ... + + with sentry_sdk.start_span(description="request c"): # drop + with sentry_sdk.start_span(description="cache a c"): + with sentry_sdk.start_span(description="db a c"): + ... + + with sentry_sdk.start_span(description="new a c"): # keep + with sentry_sdk.start_span(description="cache c"): + with sentry_sdk.start_span(description="db c"): + ... + + assert len(envelopes) == 2 + (envelope1, envelope2) = envelopes + transaction1 = envelope1.items[0].payload.json + transaction2 = envelope2.items[0].payload.json + + assert transaction1["transaction"] == "request a" + assert len(transaction1["spans"]) == 2 + assert transaction2["transaction"] == "new a c" + assert len(transaction2["spans"]) == 2 + + +def test_sampling_traces_sampler_boolean(sentry_init, capture_envelopes): + def keep_only_a(sampling_context): + if " a" in sampling_context["transaction_context"]["name"]: + return True + else: + return False + + sentry_init( + traces_sample_rate=1.0, + traces_sampler=keep_only_a, + ) + + envelopes = capture_envelopes() + + with sentry_sdk.start_span(description="request a"): # keep + with sentry_sdk.start_span(description="cache a"): + with sentry_sdk.start_span(description="db X"): + ... + + with sentry_sdk.start_span(description="request b"): # drop + with sentry_sdk.start_span(description="cache b"): + with sentry_sdk.start_span(description="db b"): + ... + + assert len(envelopes) == 1 + (envelope,) = envelopes + transaction = envelope.items[0].payload.json + + assert transaction["transaction"] == "request a" + assert len(transaction["spans"]) == 2 + + +@pytest.mark.parametrize( + "traces_sample_rate, expected_num_of_envelopes", + [ + # special case for testing, do not pass any traces_sample_rate to init() (the default traces_sample_rate=None will be used) + (USE_DEFAULT_TRACES_SAMPLE_RATE, 0), + # traces_sample_rate=None means do not create new traces, and also do not continue incoming traces. So, no envelopes at all. + (None, 0), + # traces_sample_rate=0 means do not create new traces (0% of the requests), but continue incoming traces. So envelopes will be created only if there is an incoming trace. + (0, 1), + # traces_sample_rate=1 means create new traces for 100% of requests (and also continue incoming traces, of course). 
+ (1, 1), + ], +) +def test_sampling_parent_sampled( + sentry_init, + traces_sample_rate, + expected_num_of_envelopes, + capture_envelopes, +): + kwargs = {} + if traces_sample_rate != USE_DEFAULT_TRACES_SAMPLE_RATE: + kwargs["traces_sample_rate"] = traces_sample_rate + + sentry_init(**kwargs) + + envelopes = capture_envelopes() + + # The upstream service has sampled the request + headers = { + "sentry-trace": "771a43a4192642f0b136d5159a501700-1234567890abcdef-1", + } + with sentry_sdk.continue_trace(headers): + with sentry_sdk.start_span(description="request a"): + with sentry_sdk.start_span(description="cache a"): + with sentry_sdk.start_span(description="db X"): + ... + + assert len(envelopes) == expected_num_of_envelopes + + if expected_num_of_envelopes == 1: + (envelope,) = envelopes + transaction = envelope.items[0].payload.json + assert transaction["transaction"] == "request a" + assert ( + transaction["contexts"]["trace"]["trace_id"] + == "771a43a4192642f0b136d5159a501700" + ) + assert transaction["contexts"]["trace"]["span_id"] != "1234567890abcdef" + assert transaction["contexts"]["trace"]["parent_span_id"] == "1234567890abcdef" + + +@pytest.mark.parametrize( + "traces_sample_rate, upstream_sampled, expected_num_of_envelopes", + [ + # special case for testing, do not pass any traces_sample_rate to init() (the default traces_sample_rate=None will be used) + (USE_DEFAULT_TRACES_SAMPLE_RATE, 0, 0), + (USE_DEFAULT_TRACES_SAMPLE_RATE, 1, 0), + # traces_sample_rate=None means do not create new traces, and also do not continue incoming traces. So, no envelopes at all. + (None, 0, 0), + (None, 1, 0), + # traces_sample_rate=0 means do not create new traces (0% of the requests), but continue incoming traces. So envelopes will be created only if there is an incoming trace. + (0, 0, 0), + (0, 1, 1), + # traces_sample_rate=1 means create new traces for 100% of requests (and also continue incoming traces, of course). + (1, 0, 0), + (1, 1, 1), + ], +) +def test_sampling_parent_dropped( + sentry_init, + traces_sample_rate, + upstream_sampled, + expected_num_of_envelopes, + capture_envelopes, +): + kwargs = {} + if traces_sample_rate != USE_DEFAULT_TRACES_SAMPLE_RATE: + kwargs["traces_sample_rate"] = traces_sample_rate + + sentry_init(**kwargs) + + envelopes = capture_envelopes() + + # The upstream service has dropped the request + headers = { + "sentry-trace": f"771a43a4192642f0b136d5159a501700-1234567890abcdef-{upstream_sampled}", + } + with sentry_sdk.continue_trace(headers): + with sentry_sdk.start_span(description="request a"): + with sentry_sdk.start_span(description="cache a"): + with sentry_sdk.start_span(description="db X"): + ... + + assert len(envelopes) == expected_num_of_envelopes + + if expected_num_of_envelopes == 1: + (envelope,) = envelopes + transaction = envelope.items[0].payload.json + assert transaction["transaction"] == "request a" + assert ( + transaction["contexts"]["trace"]["trace_id"] + == "771a43a4192642f0b136d5159a501700" + ) + assert transaction["contexts"]["trace"]["span_id"] != "1234567890abcdef" + assert transaction["contexts"]["trace"]["parent_span_id"] == "1234567890abcdef" + + +@pytest.mark.parametrize( + "traces_sample_rate, expected_num_of_envelopes", + [ + # special case for testing, do not pass any traces_sample_rate to init() (the default traces_sample_rate=None will be used) + (USE_DEFAULT_TRACES_SAMPLE_RATE, 0), + # traces_sample_rate=None means do not create new traces, and also do not continue incoming traces. So, no envelopes at all.
+ (None, 0), + # traces_sample_rate=0 means do not create new traces (0% of the requests), but continue incoming traces. So envelopes will be created only if there is an incoming trace. + (0, 0), + # traces_sample_rate=1 means create new traces for 100% of requests (and also continue incoming traces, of course). + (1, 1), + ], +) +def test_sampling_parent_deferred( + sentry_init, + traces_sample_rate, + expected_num_of_envelopes, + capture_envelopes, +): + kwargs = {} + if traces_sample_rate != USE_DEFAULT_TRACES_SAMPLE_RATE: + kwargs["traces_sample_rate"] = traces_sample_rate + + sentry_init(**kwargs) + + envelopes = capture_envelopes() + + # The upstream service has deferred the sampling decision to us. + headers = { + "sentry-trace": "771a43a4192642f0b136d5159a501700-1234567890abcdef-", + } + + with sentry_sdk.continue_trace(headers): + with sentry_sdk.start_span(description="request a"): + with sentry_sdk.start_span(description="cache a"): + with sentry_sdk.start_span(description="db X"): + ... + + assert len(envelopes) == expected_num_of_envelopes + + if expected_num_of_envelopes == 1: + (envelope,) = envelopes + transaction = envelope.items[0].payload.json + assert transaction["transaction"] == "request a" + assert ( + transaction["contexts"]["trace"]["trace_id"] + == "771a43a4192642f0b136d5159a501700" + ) + assert transaction["contexts"]["trace"]["span_id"] != "1234567890abcdef" + assert transaction["contexts"]["trace"]["parent_span_id"] == "1234567890abcdef" diff --git a/tests/opentelemetry/test_span_processor.py b/tests/opentelemetry/test_span_processor.py new file mode 100644 index 0000000000..7d6283d4ea --- /dev/null +++ b/tests/opentelemetry/test_span_processor.py @@ -0,0 +1,19 @@ +import sentry_sdk + + +def test_span_processor_omits_underscore_attributes(sentry_init, capture_events): + sentry_init(traces_sample_rate=1.0) + + events = capture_events() + + with sentry_sdk.start_span(): + with sentry_sdk.start_span() as span: + span.set_attribute("_internal", 47) + span.set_attribute("noninternal", 23) + + assert span._otel_span.attributes["_internal"] == 47 + assert span._otel_span.attributes["noninternal"] == 23 + + outgoing_span = events[0]["spans"][0] + assert "_internal" not in outgoing_span["data"] + assert "noninternal" in outgoing_span["data"] diff --git a/tests/opentelemetry/test_utils.py b/tests/opentelemetry/test_utils.py new file mode 100644 index 0000000000..a73efd9b3b --- /dev/null +++ b/tests/opentelemetry/test_utils.py @@ -0,0 +1,404 @@ +from unittest.mock import MagicMock + +import pytest +from opentelemetry.trace import SpanKind, Status, StatusCode +from opentelemetry.version import __version__ as OTEL_VERSION + +from sentry_sdk.opentelemetry.utils import ( + extract_span_data, + extract_span_status, + span_data_for_db_query, + span_data_for_http_method, +) +from sentry_sdk.utils import parse_version + +OTEL_VERSION = parse_version(OTEL_VERSION) + + +@pytest.mark.parametrize( + "name, status, attributes, expected", + [ + ( + "OTel Span Blank", + Status(StatusCode.UNSET), + {}, + { + "op": "OTel Span Blank", + "description": "OTel Span Blank", + "status": None, + "http_status_code": None, + "origin": None, + }, + ), + ( + "OTel Span RPC", + Status(StatusCode.UNSET), + { + "rpc.service": "myservice.EchoService", + }, + { + "op": "rpc", + "description": "OTel Span RPC", + "status": None, + "http_status_code": None, + "origin": None, + }, + ), + ( + "OTel Span Messaging", + Status(StatusCode.UNSET), + { + "messaging.system": "rabbitmq", + }, + { + "op": "message", + 
"description": "OTel Span Messaging", + "status": None, + "http_status_code": None, + "origin": None, + }, + ), + ( + "OTel Span FaaS", + Status(StatusCode.UNSET), + { + "faas.trigger": "pubsub", + }, + { + "op": "pubsub", + "description": "OTel Span FaaS", + "status": None, + "http_status_code": None, + "origin": None, + }, + ), + ], +) +def test_extract_span_data(name, status, attributes, expected): + otel_span = MagicMock() + otel_span.name = name + otel_span.status = Status(StatusCode.UNSET) + otel_span.attributes = attributes + + op, description, status, http_status_code, origin = extract_span_data(otel_span) + result = { + "op": op, + "description": description, + "status": status, + "http_status_code": http_status_code, + "origin": origin, + } + assert result == expected + + +@pytest.mark.parametrize( + "kind, status, attributes, expected", + [ + ( + SpanKind.CLIENT, + Status(StatusCode.OK), + { + "http.method": "GET", + "http.target": None, # no location for description + "net.peer.name": None, + "http.url": None, + }, + { + "op": "http.client", + "description": "GET", + "status": "ok", + "http_status_code": None, + "origin": None, + }, + ), + ( + SpanKind.CLIENT, + Status(StatusCode.OK), + { + "http.method": "GET", + "http.target": "/target", # this can be the location in the description + }, + { + "op": "http.client", + "description": "GET /target", + "status": "ok", + "http_status_code": None, + "origin": None, + }, + ), + ( + SpanKind.CLIENT, + Status(StatusCode.OK), + { + "http.method": "GET", + "net.peer.name": "example.com", # this can be the location in the description + }, + { + "op": "http.client", + "description": "GET example.com", + "status": "ok", + "http_status_code": None, + "origin": None, + }, + ), + ( + SpanKind.CLIENT, + Status(StatusCode.OK), + { + "http.method": "GET", + "http.target": "/target", # target takes precedence over net.peer.name + "net.peer.name": "example.com", + }, + { + "op": "http.client", + "description": "GET /target", + "status": "ok", + "http_status_code": None, + "origin": None, + }, + ), + ( + SpanKind.CLIENT, + Status(StatusCode.OK), + { + "http.method": "GET", + "http.url": "https://username:secretpwd@example.com/bla/?secret=123&anothersecret=456", # sensitive data is stripped + }, + { + "op": "http.client", + "description": "GET https://example.com/bla/", + "status": "ok", + "http_status_code": None, + "origin": None, + }, + ), + ], +) +def test_span_data_for_http_method(kind, status, attributes, expected): + otel_span = MagicMock() + otel_span.kind = kind + otel_span.status = status + otel_span.attributes = attributes + + op, description, status, http_status_code, origin = span_data_for_http_method( + otel_span + ) + result = { + "op": op, + "description": description, + "status": status, + "http_status_code": http_status_code, + "origin": origin, + } + assert result == expected + + +def test_span_data_for_db_query(): + otel_span = MagicMock() + otel_span.name = "OTel Span" + otel_span.attributes = {} + + op, description, status, http_status, origin = span_data_for_db_query(otel_span) + assert op == "db" + assert description == "OTel Span" + assert status is None + assert http_status is None + assert origin is None + + otel_span.attributes = {"db.statement": "SELECT * FROM table;"} + + op, description, status, http_status, origin = span_data_for_db_query(otel_span) + assert op == "db" + assert description == "SELECT * FROM table;" + assert status is None + assert http_status is None + assert origin is None + + 
+@pytest.mark.parametrize( + "kind, status, attributes, expected", + [ + ( + SpanKind.CLIENT, + None, # None means unknown error + { + "http.method": "POST", + "http.route": "/some/route", + }, + { + "status": "unknown_error", + "http_status_code": None, + }, + ), + ( + SpanKind.CLIENT, + None, + { + "http.method": "POST", + "http.route": "/some/route", + "http.status_code": 502, # Take this status in case of None status + }, + { + "status": "internal_error", + "http_status_code": 502, + }, + ), + ( + SpanKind.SERVER, + Status(StatusCode.UNSET), + { + "http.method": "POST", + "http.route": "/some/route", + }, + { + "status": None, + "http_status_code": None, + }, + ), + ( + SpanKind.SERVER, + Status(StatusCode.UNSET), + { + "http.method": "POST", + "http.route": "/some/route", + "http.status_code": 502, # Take this status in case of UNSET status + }, + { + "status": "internal_error", + "http_status_code": 502, + }, + ), + ( + SpanKind.SERVER, + None, + { + "http.method": "POST", + "http.route": "/some/route", + "http.status_code": 502, + "http.response.status_code": 503, # this takes precedence over deprecated http.status_code + }, + { + "status": "unavailable", + "http_status_code": 503, + # old otel versions won't take the new attribute into account + "status_old": "internal_error", + "http_status_code_old": 502, + }, + ), + ( + SpanKind.SERVER, + Status(StatusCode.UNSET), + { + "http.method": "POST", + "http.route": "/some/route", + "http.status_code": 502, + "http.response.status_code": 503, # this takes precedence over deprecated http.status_code + }, + { + "status": "unavailable", + "http_status_code": 503, + # old otel versions won't take the new attribute into account + "status_old": "internal_error", + "http_status_code_old": 502, + }, + ), + ( + SpanKind.SERVER, + Status(StatusCode.OK), # OK status is taken right away + { + "http.method": "POST", + "http.route": "/some/route", + }, + { + "status": "ok", + "http_status_code": None, + }, + ), + ( + SpanKind.SERVER, + Status(StatusCode.OK), # OK status is taken right away + { + "http.method": "POST", + "http.route": "/some/route", + "http.response.status_code": 200, + "http.status_code": 200, + }, + { + "status": "ok", + "http_status_code": 200, + }, + ), + ( + SpanKind.SERVER, + Status( + StatusCode.ERROR + ), # Error status without description gets the http status from attributes + { + "http.method": "POST", + "http.route": "/some/route", + "http.response.status_code": 401, + "http.status_code": 401, + }, + { + "status": "unauthenticated", + "http_status_code": 401, + }, + ), + ( + SpanKind.SERVER, + Status(StatusCode.ERROR, "I'm a teapot"), + { + "http.method": "POST", + "http.route": "/some/route", + "http.response.status_code": 418, + "http.status_code": 418, + }, + { + "status": "invalid_argument", + "http_status_code": 418, + }, + ), + ( + SpanKind.SERVER, + Status( + StatusCode.ERROR, "unimplemented" + ), # Error status with known description is taken (grpc errors) + { + "http.method": "POST", + "http.route": "/some/route", + }, + { + "status": "unimplemented", + "http_status_code": None, + }, + ), + ], +) +def test_extract_span_status(kind, status, attributes, expected): + otel_span = MagicMock() + otel_span.kind = kind + otel_span.status = status + otel_span.attributes = attributes + + status, http_status_code = extract_span_status(otel_span) + result = { + "status": status, + "http_status_code": http_status_code, + } + + if ( + OTEL_VERSION < (1, 21) + and "status_old" in expected + and "http_status_code_old" in expected 
+ ): + expected = { + "status": expected["status_old"], + "http_status_code": expected["http_status_code_old"], + } + else: + expected = { + "status": expected["status"], + "http_status_code": expected["http_status_code"], + } + + assert result == expected diff --git a/tests/profiler/test_continuous_profiler.py b/tests/profiler/test_continuous_profiler.py index 991f8bda5d..27994648f8 100644 --- a/tests/profiler/test_continuous_profiler.py +++ b/tests/profiler/test_continuous_profiler.py @@ -11,9 +11,7 @@ get_profiler_id, setup_continuous_profiler, start_profiler, - start_profile_session, stop_profiler, - stop_profile_session, ) from tests.conftest import ApproxDict @@ -26,25 +24,16 @@ requires_gevent = pytest.mark.skipif(gevent is None, reason="gevent not enabled") -def get_client_options(use_top_level_profiler_mode): +def get_client_options(): def client_options( mode=None, auto_start=None, profile_session_sample_rate=1.0, lifecycle="manual" ): - if use_top_level_profiler_mode: - return { - "profile_lifecycle": lifecycle, - "profiler_mode": mode, - "profile_session_sample_rate": profile_session_sample_rate, - "_experiments": { - "continuous_profiling_auto_start": auto_start, - }, - } return { "profile_lifecycle": lifecycle, + "profiler_mode": mode, "profile_session_sample_rate": profile_session_sample_rate, "_experiments": { "continuous_profiling_auto_start": auto_start, - "continuous_profiling_mode": mode, }, } @@ -62,8 +51,7 @@ def client_options( @pytest.mark.parametrize( "make_options", [ - pytest.param(get_client_options(True), id="non-experiment"), - pytest.param(get_client_options(False), id="experiment"), + pytest.param(get_client_options()), ], ) def test_continuous_profiler_invalid_mode(mode, make_options, teardown_profiling): @@ -85,8 +73,7 @@ def test_continuous_profiler_invalid_mode(mode, make_options, teardown_profiling @pytest.mark.parametrize( "make_options", [ - pytest.param(get_client_options(True), id="non-experiment"), - pytest.param(get_client_options(False), id="experiment"), + pytest.param(get_client_options()), ], ) def test_continuous_profiler_valid_mode(mode, make_options, teardown_profiling): @@ -108,8 +95,7 @@ def test_continuous_profiler_valid_mode(mode, make_options, teardown_profiling): @pytest.mark.parametrize( "make_options", [ - pytest.param(get_client_options(True), id="non-experiment"), - pytest.param(get_client_options(False), id="experiment"), + pytest.param(get_client_options()), ], ) def test_continuous_profiler_setup_twice(mode, make_options, teardown_profiling): @@ -214,26 +200,10 @@ def assert_single_transaction_without_profile_chunks(envelopes): pytest.param("gevent", marks=requires_gevent), ], ) -@pytest.mark.parametrize( - ["start_profiler_func", "stop_profiler_func"], - [ - pytest.param( - start_profile_session, - stop_profile_session, - id="start_profile_session/stop_profile_session (deprecated)", - ), - pytest.param( - start_profiler, - stop_profiler, - id="start_profiler/stop_profiler", - ), - ], -) @pytest.mark.parametrize( "make_options", [ - pytest.param(get_client_options(True), id="non-experiment"), - pytest.param(get_client_options(False), id="experiment"), + pytest.param(get_client_options()), ], ) @mock.patch("sentry_sdk.profiler.continuous_profiler.PROFILE_BUFFER_SECONDS", 0.01) @@ -241,8 +211,6 @@ def test_continuous_profiler_auto_start_and_manual_stop( sentry_init, capture_envelopes, mode, - start_profiler_func, - stop_profiler_func, make_options, teardown_profiling, ): @@ -256,30 +224,30 @@ def 
test_continuous_profiler_auto_start_and_manual_stop( thread = threading.current_thread() - with sentry_sdk.start_transaction(name="profiling"): + with sentry_sdk.start_span(name="profiling"): with sentry_sdk.start_span(op="op"): - time.sleep(0.05) + time.sleep(0.1) assert_single_transaction_with_profile_chunks(envelopes, thread) for _ in range(3): - stop_profiler_func() + stop_profiler() envelopes.clear() - with sentry_sdk.start_transaction(name="profiling"): + with sentry_sdk.start_span(name="profiling"): with sentry_sdk.start_span(op="op"): - time.sleep(0.05) + time.sleep(0.1) assert_single_transaction_without_profile_chunks(envelopes) - start_profiler_func() + start_profiler() envelopes.clear() - with sentry_sdk.start_transaction(name="profiling"): + with sentry_sdk.start_span(name="profiling"): with sentry_sdk.start_span(op="op"): - time.sleep(0.05) + time.sleep(0.1) assert_single_transaction_with_profile_chunks(envelopes, thread) @@ -291,26 +259,10 @@ def test_continuous_profiler_auto_start_and_manual_stop( pytest.param("gevent", marks=requires_gevent), ], ) -@pytest.mark.parametrize( - ["start_profiler_func", "stop_profiler_func"], - [ - pytest.param( - start_profile_session, - stop_profile_session, - id="start_profile_session/stop_profile_session (deprecated)", - ), - pytest.param( - start_profiler, - stop_profiler, - id="start_profiler/stop_profiler", - ), - ], -) @pytest.mark.parametrize( "make_options", [ - pytest.param(get_client_options(True), id="non-experiment"), - pytest.param(get_client_options(False), id="experiment"), + pytest.param(get_client_options()), ], ) @mock.patch("sentry_sdk.profiler.continuous_profiler.PROFILE_BUFFER_SECONDS", 0.01) @@ -318,8 +270,6 @@ def test_continuous_profiler_manual_start_and_stop_sampled( sentry_init, capture_envelopes, mode, - start_profiler_func, - stop_profiler_func, make_options, teardown_profiling, ): @@ -336,11 +286,11 @@ def test_continuous_profiler_manual_start_and_stop_sampled( thread = threading.current_thread() for _ in range(3): - start_profiler_func() + start_profiler() envelopes.clear() - with sentry_sdk.start_transaction(name="profiling"): + with sentry_sdk.start_span(name="profiling"): assert get_profiler_id() is not None, "profiler should be running" with sentry_sdk.start_span(op="op"): time.sleep(0.1) @@ -350,14 +300,14 @@ def test_continuous_profiler_manual_start_and_stop_sampled( assert get_profiler_id() is not None, "profiler should be running" - stop_profiler_func() + stop_profiler() # the profiler stops immediately in manual mode assert get_profiler_id() is None, "profiler should not be running" envelopes.clear() - with sentry_sdk.start_transaction(name="profiling"): + with sentry_sdk.start_span(name="profiling"): assert get_profiler_id() is None, "profiler should not be running" with sentry_sdk.start_span(op="op"): time.sleep(0.1) @@ -373,34 +323,16 @@ def test_continuous_profiler_manual_start_and_stop_sampled( pytest.param("gevent", marks=requires_gevent), ], ) -@pytest.mark.parametrize( - ["start_profiler_func", "stop_profiler_func"], - [ - pytest.param( - start_profile_session, - stop_profile_session, - id="start_profile_session/stop_profile_session (deprecated)", - ), - pytest.param( - start_profiler, - stop_profiler, - id="start_profiler/stop_profiler", - ), - ], -) @pytest.mark.parametrize( "make_options", [ - pytest.param(get_client_options(True), id="non-experiment"), - pytest.param(get_client_options(False), id="experiment"), + pytest.param(get_client_options()), ], ) def 
test_continuous_profiler_manual_start_and_stop_unsampled( sentry_init, capture_envelopes, mode, - start_profiler_func, - stop_profiler_func, make_options, teardown_profiling, ): @@ -414,15 +346,15 @@ def test_continuous_profiler_manual_start_and_stop_unsampled( envelopes = capture_envelopes() - start_profiler_func() + start_profiler() - with sentry_sdk.start_transaction(name="profiling"): + with sentry_sdk.start_span(name="profiling"): with sentry_sdk.start_span(op="op"): time.sleep(0.05) assert_single_transaction_without_profile_chunks(envelopes) - stop_profiler_func() + stop_profiler() @pytest.mark.parametrize( @@ -435,8 +367,7 @@ def test_continuous_profiler_manual_start_and_stop_unsampled( @pytest.mark.parametrize( "make_options", [ - pytest.param(get_client_options(True), id="non-experiment"), - pytest.param(get_client_options(False), id="experiment"), + pytest.param(get_client_options()), ], ) @mock.patch("sentry_sdk.profiler.continuous_profiler.DEFAULT_SAMPLING_FREQUENCY", 21) @@ -462,7 +393,7 @@ def test_continuous_profiler_auto_start_and_stop_sampled( for _ in range(3): envelopes.clear() - with sentry_sdk.start_transaction(name="profiling 1"): + with sentry_sdk.start_span(name="profiling 1"): assert get_profiler_id() is not None, "profiler should be running" with sentry_sdk.start_span(op="op"): time.sleep(0.1) @@ -472,7 +403,7 @@ def test_continuous_profiler_auto_start_and_stop_sampled( # a transaction immediately, it'll be part of the same chunk assert get_profiler_id() is not None, "profiler should be running" - with sentry_sdk.start_transaction(name="profiling 2"): + with sentry_sdk.start_span(name="profiling 2"): assert get_profiler_id() is not None, "profiler should be running" with sentry_sdk.start_span(op="op"): time.sleep(0.1) @@ -497,8 +428,7 @@ def test_continuous_profiler_auto_start_and_stop_sampled( @pytest.mark.parametrize( "make_options", [ - pytest.param(get_client_options(True), id="non-experiment"), - pytest.param(get_client_options(False), id="experiment"), + pytest.param(get_client_options()), ], ) @mock.patch("sentry_sdk.profiler.continuous_profiler.PROFILE_BUFFER_SECONDS", 0.01) @@ -522,7 +452,7 @@ def test_continuous_profiler_auto_start_and_stop_unsampled( for _ in range(3): envelopes.clear() - with sentry_sdk.start_transaction(name="profiling"): + with sentry_sdk.start_span(name="profiling"): assert get_profiler_id() is None, "profiler should not be running" with sentry_sdk.start_span(op="op"): time.sleep(0.05) @@ -543,33 +473,15 @@ def test_continuous_profiler_auto_start_and_stop_unsampled( ), ], ) -@pytest.mark.parametrize( - ["start_profiler_func", "stop_profiler_func"], - [ - pytest.param( - start_profile_session, - stop_profile_session, - id="start_profile_session/stop_profile_session (deprecated)", - ), - pytest.param( - start_profiler, - stop_profiler, - id="start_profiler/stop_profiler", - ), - ], -) @pytest.mark.parametrize( "make_options", [ - pytest.param(get_client_options(True), id="non-experiment"), - pytest.param(get_client_options(False), id="experiment"), + pytest.param(get_client_options()), ], ) def test_continuous_profiler_manual_start_and_stop_noop_when_using_trace_lifecyle( sentry_init, mode, - start_profiler_func, - stop_profiler_func, class_name, make_options, teardown_profiling, @@ -585,11 +497,11 @@ def test_continuous_profiler_manual_start_and_stop_noop_when_using_trace_lifecyl with mock.patch( f"sentry_sdk.profiler.continuous_profiler.{class_name}.ensure_running" ) as mock_ensure_running: - start_profiler_func() + start_profiler() 
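+        # With the trace lifecycle (profile_lifecycle="trace"), the profiler
+        # is controlled by span start/finish, so the manual start_profiler()
+        # call above must be a no-op: ensure_running must not be called.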
mock_ensure_running.assert_not_called() with mock.patch( f"sentry_sdk.profiler.continuous_profiler.{class_name}.teardown" ) as mock_teardown: - stop_profiler_func() + stop_profiler() mock_teardown.assert_not_called() diff --git a/tests/profiler/test_transaction_profiler.py b/tests/profiler/test_transaction_profiler.py index 142fd7d78c..97836d59d9 100644 --- a/tests/profiler/test_transaction_profiler.py +++ b/tests/profiler/test_transaction_profiler.py @@ -1,6 +1,5 @@ import inspect import os -import sentry_sdk import sys import threading import time @@ -10,7 +9,7 @@ import pytest -from sentry_sdk import start_transaction +from sentry_sdk import start_span from sentry_sdk.profiler.transaction_profiler import ( GeventScheduler, Profile, @@ -40,30 +39,13 @@ def process_test_sample(sample): return [(tid, (stack, stack)) for tid, stack in sample] -def non_experimental_options(mode=None, sample_rate=None): - return {"profiler_mode": mode, "profiles_sample_rate": sample_rate} - - -def experimental_options(mode=None, sample_rate=None): - return { - "_experiments": {"profiler_mode": mode, "profiles_sample_rate": sample_rate} - } - - @pytest.mark.parametrize( "mode", [pytest.param("foo")], ) -@pytest.mark.parametrize( - "make_options", - [ - pytest.param(experimental_options, id="experiment"), - pytest.param(non_experimental_options, id="non experimental"), - ], -) -def test_profiler_invalid_mode(mode, make_options, teardown_profiling): +def test_profiler_invalid_mode(mode, teardown_profiling): with pytest.raises(ValueError): - setup_profiler(make_options(mode)) + setup_profiler({"profiler_mode": mode}) @pytest.mark.parametrize( @@ -74,30 +56,16 @@ def test_profiler_invalid_mode(mode, make_options, teardown_profiling): pytest.param("gevent", marks=requires_gevent), ], ) -@pytest.mark.parametrize( - "make_options", - [ - pytest.param(experimental_options, id="experiment"), - pytest.param(non_experimental_options, id="non experimental"), - ], -) -def test_profiler_valid_mode(mode, make_options, teardown_profiling): +def test_profiler_valid_mode(mode, teardown_profiling): # should not raise any exceptions - setup_profiler(make_options(mode)) + setup_profiler({"profiler_mode": mode}) -@pytest.mark.parametrize( - "make_options", - [ - pytest.param(experimental_options, id="experiment"), - pytest.param(non_experimental_options, id="non experimental"), - ], -) -def test_profiler_setup_twice(make_options, teardown_profiling): +def test_profiler_setup_twice(teardown_profiling): # setting up the first time should return True to indicate success - assert setup_profiler(make_options()) + assert setup_profiler({}) # setting up the second time should return False to indicate no-op - assert not setup_profiler(make_options()) + assert not setup_profiler({}) @pytest.mark.parametrize( @@ -117,13 +85,6 @@ def test_profiler_setup_twice(make_options, teardown_profiling): pytest.param(None, 0, id="profiler not enabled"), ], ) -@pytest.mark.parametrize( - "make_options", - [ - pytest.param(experimental_options, id="experiment"), - pytest.param(non_experimental_options, id="non experimental"), - ], -) @mock.patch("sentry_sdk.profiler.transaction_profiler.PROFILE_MINIMUM_SAMPLES", 0) def test_profiles_sample_rate( sentry_init, @@ -132,15 +93,12 @@ def test_profiles_sample_rate( teardown_profiling, profiles_sample_rate, profile_count, - make_options, mode, ): - options = make_options(mode=mode, sample_rate=profiles_sample_rate) sentry_init( traces_sample_rate=1.0, - profiler_mode=options.get("profiler_mode"), - 
profiles_sample_rate=options.get("profiles_sample_rate"), - _experiments=options.get("_experiments", {}), + profiler_mode=mode, + profiles_sample_rate=profiles_sample_rate, ) envelopes = capture_envelopes() @@ -149,7 +107,7 @@ def test_profiles_sample_rate( with mock.patch( "sentry_sdk.profiler.transaction_profiler.random.random", return_value=0.5 ): - with start_transaction(name="profiling"): + with start_span(name="profiling"): pass items = defaultdict(list) @@ -212,6 +170,7 @@ def test_profiles_sampler( sentry_init( traces_sample_rate=1.0, profiles_sampler=profiles_sampler, + profiler_mode=mode, ) envelopes = capture_envelopes() @@ -220,7 +179,7 @@ def test_profiles_sampler( with mock.patch( "sentry_sdk.profiler.transaction_profiler.random.random", return_value=0.5 ): - with start_transaction(name="profiling"): + with start_span(name="profiling"): pass items = defaultdict(list) @@ -244,13 +203,13 @@ def test_minimum_unique_samples_required( ): sentry_init( traces_sample_rate=1.0, - _experiments={"profiles_sample_rate": 1.0}, + profiles_sample_rate=1.0, ) envelopes = capture_envelopes() record_lost_event_calls = capture_record_lost_event_calls() - with start_transaction(name="profiling"): + with start_span(name="profiling"): pass items = defaultdict(list) @@ -273,12 +232,12 @@ def test_profile_captured( ): sentry_init( traces_sample_rate=1.0, - _experiments={"profiles_sample_rate": 1.0}, + profiles_sample_rate=1.0, ) envelopes = capture_envelopes() - with start_transaction(name="profiling"): + with start_span(name="profiling"): time.sleep(0.05) items = defaultdict(list) @@ -817,24 +776,6 @@ def test_profile_processing( assert processed["samples"] == expected["samples"] -def test_hub_backwards_compatibility(suppress_deprecation_warnings): - hub = sentry_sdk.Hub() - - with pytest.warns(DeprecationWarning): - profile = Profile(True, 0, hub=hub) - - with pytest.warns(DeprecationWarning): - assert profile.hub is hub - - new_hub = sentry_sdk.Hub() - - with pytest.warns(DeprecationWarning): - profile.hub = new_hub - - with pytest.warns(DeprecationWarning): - assert profile.hub is new_hub - - def test_no_warning_without_hub(): with warnings.catch_warnings(): warnings.simplefilter("error") diff --git a/tests/test_ai_monitoring.py b/tests/test_ai_monitoring.py index 5e7c7432fa..9ecd75fc84 100644 --- a/tests/test_ai_monitoring.py +++ b/tests/test_ai_monitoring.py @@ -16,7 +16,7 @@ def tool(**kwargs): def pipeline(): tool() - with sentry_sdk.start_transaction(): + with sentry_sdk.start_span(name="pipeline"): pipeline() transaction = events[0] @@ -43,7 +43,7 @@ def tool(**kwargs): def pipeline(): tool() - with sentry_sdk.start_transaction(): + with sentry_sdk.start_span(name="pipeline"): pipeline(sentry_tags={"user": "colin"}, sentry_data={"some_data": "value"}) transaction = events[0] @@ -74,7 +74,7 @@ async def async_tool(**kwargs): async def async_pipeline(): await async_tool() - with sentry_sdk.start_transaction(): + with sentry_sdk.start_span(name="async_pipeline"): await async_pipeline() transaction = events[0] @@ -102,7 +102,7 @@ async def async_tool(**kwargs): async def async_pipeline(): await async_tool() - with sentry_sdk.start_transaction(): + with sentry_sdk.start_span(name="async_pipeline"): await async_pipeline( sentry_tags={"user": "czyber"}, sentry_data={"some_data": "value"} ) diff --git a/tests/test_api.py b/tests/test_api.py index 08c295a5c4..ae88791f96 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -1,9 +1,7 @@ import pytest -import re from unittest import mock -import 
sentry_sdk from sentry_sdk import ( capture_exception, continue_trace, @@ -12,16 +10,15 @@ get_current_span, get_traceparent, is_initialized, - start_transaction, + start_span, set_tags, - configure_scope, - push_scope, get_global_scope, get_current_scope, get_isolation_scope, ) from sentry_sdk.client import Client, NonRecordingClient +from tests.conftest import SortedBaggage @pytest.mark.forked @@ -35,7 +32,7 @@ def test_get_current_span(): @pytest.mark.forked -def test_get_current_span_default_hub(sentry_init): +def test_get_current_span_current_scope(sentry_init): sentry_init() assert get_current_span() is None @@ -48,23 +45,23 @@ def test_get_current_span_default_hub(sentry_init): @pytest.mark.forked -def test_get_current_span_default_hub_with_transaction(sentry_init): +def test_get_current_span_current_scope_with_span(sentry_init): sentry_init() assert get_current_span() is None - with start_transaction() as new_transaction: - assert get_current_span() == new_transaction + with start_span() as new_span: + assert get_current_span() == new_span @pytest.mark.forked def test_traceparent_with_tracing_enabled(sentry_init): sentry_init(traces_sample_rate=1.0) - with start_transaction() as transaction: + with start_span() as span: expected_traceparent = "%s-%s-1" % ( - transaction.trace_id, - transaction.span_id, + span.trace_id, + span.span_id, ) assert get_traceparent() == expected_traceparent @@ -90,44 +87,42 @@ def test_baggage_with_tracing_disabled(sentry_init): propagation_context.trace_id ) ) - assert get_baggage() == expected_baggage + assert get_baggage() == SortedBaggage(expected_baggage) @pytest.mark.forked def test_baggage_with_tracing_enabled(sentry_init): sentry_init(traces_sample_rate=1.0, release="1.0.0", environment="dev") - with start_transaction() as transaction: - expected_baggage_re = r"^sentry-trace_id={},sentry-sample_rand=0\.\d{{6}},sentry-environment=dev,sentry-release=1\.0\.0,sentry-sample_rate=1\.0,sentry-sampled={}$".format( - transaction.trace_id, "true" if transaction.sampled else "false" - ) - assert re.match(expected_baggage_re, get_baggage()) + with mock.patch("sentry_sdk.tracing_utils.Random.uniform", return_value=0.111111): + with start_span(name="foo") as span: + expected_baggage = f"sentry-transaction=foo,sentry-trace_id={span.trace_id},sentry-sample_rand=0.111111,sentry-environment=dev,sentry-release=1.0.0,sentry-sample_rate=1.0,sentry-sampled=true" # noqa: E231 + assert get_baggage() == SortedBaggage(expected_baggage) @pytest.mark.forked def test_continue_trace(sentry_init): - sentry_init() + sentry_init(traces_sample_rate=1.0) trace_id = "471a43a4192642f0b136d5159a501701" parent_span_id = "6e8f22c393e68f19" parent_sampled = 1 - transaction = continue_trace( + + with continue_trace( { "sentry-trace": "{}-{}-{}".format(trace_id, parent_span_id, parent_sampled), - "baggage": "sentry-trace_id=566e3688a61d4bc888951642d6f14a19,sentry-sample_rand=0.123456", + "baggage": "sentry-trace_id=566e3688a61d4bc888951642d6f14a19,sentry-sample_rand=0.123456", # noqa: E231 }, - name="some name", - ) - with start_transaction(transaction): - assert transaction.name == "some name" - - propagation_context = get_isolation_scope()._propagation_context - assert propagation_context.trace_id == transaction.trace_id == trace_id - assert propagation_context.parent_span_id == parent_span_id - assert propagation_context.parent_sampled == parent_sampled - assert propagation_context.dynamic_sampling_context == { - "trace_id": "566e3688a61d4bc888951642d6f14a19", - "sample_rand": 
"0.123456", - } + ): + with start_span(name="some name") as span: + assert span.name == "some name" + propagation_context = get_isolation_scope()._propagation_context + assert propagation_context.trace_id == span.trace_id == trace_id + assert propagation_context.parent_span_id == parent_span_id + assert propagation_context.parent_sampled == parent_sampled + assert propagation_context.dynamic_sampling_context == { + "trace_id": "566e3688a61d4bc888951642d6f14a19", + "sample_rand": "0.123456", + } @pytest.mark.forked @@ -187,31 +182,3 @@ def test_set_tags(sentry_init, capture_events): "tag2": "updated", "tag3": "new", }, "Updating tags with empty dict changed tags" - - -def test_configure_scope_deprecation(): - with pytest.warns(DeprecationWarning): - with configure_scope(): - ... - - -def test_push_scope_deprecation(): - with pytest.warns(DeprecationWarning): - with push_scope(): - ... - - -def test_init_context_manager_deprecation(): - with pytest.warns(DeprecationWarning): - with sentry_sdk.init(): - ... - - -def test_init_enter_deprecation(): - with pytest.warns(DeprecationWarning): - sentry_sdk.init().__enter__() - - -def test_init_exit_deprecation(): - with pytest.warns(DeprecationWarning): - sentry_sdk.init().__exit__(None, None, None) diff --git a/tests/test_basics.py b/tests/test_basics.py index 0fdf9f811f..f46d2a15ce 100644 --- a/tests/test_basics.py +++ b/tests/test_basics.py @@ -1,29 +1,25 @@ -import datetime import importlib import logging import os import sys import time from collections import Counter +from datetime import datetime, timedelta, timezone import pytest -from sentry_sdk.client import Client -from sentry_sdk.utils import datetime_from_isoformat import sentry_sdk import sentry_sdk.scope from sentry_sdk import ( get_client, - push_scope, capture_event, capture_exception, capture_message, - start_transaction, + start_span, last_event_id, add_breadcrumb, isolation_scope, new_scope, - Hub, ) from sentry_sdk.integrations import ( _AUTO_ENABLING_INTEGRATIONS, @@ -35,8 +31,7 @@ from sentry_sdk.integrations.logging import LoggingIntegration from sentry_sdk.integrations.stdlib import StdlibIntegration from sentry_sdk.scope import add_global_event_processor -from sentry_sdk.utils import get_sdk_name, reraise -from sentry_sdk.tracing_utils import has_tracing_enabled +from sentry_sdk.utils import datetime_from_isoformat, get_sdk_name, reraise class NoOpIntegration(Integration): @@ -177,7 +172,7 @@ def before_send_transaction(event, hint): traces_sample_rate=1.0, ) events = capture_events() - transaction = start_transaction(name="foo") + transaction = start_span(name="foo") transaction.finish() (event,) = events @@ -194,7 +189,7 @@ def before_send_transaction_discard(event, hint): traces_sample_rate=1.0, ) events = capture_events() - transaction = start_transaction(name="foo") + transaction = start_span(name="foo") transaction.finish() assert len(events) == 0 @@ -252,32 +247,6 @@ def do_this(): assert crumb["type"] == "default" -@pytest.mark.parametrize( - "enable_tracing, traces_sample_rate, tracing_enabled, updated_traces_sample_rate", - [ - (None, None, False, None), - (False, 0.0, False, 0.0), - (False, 1.0, False, 1.0), - (None, 1.0, True, 1.0), - (True, 1.0, True, 1.0), - (None, 0.0, True, 0.0), # We use this as - it's configured but turned off - (True, 0.0, True, 0.0), # We use this as - it's configured but turned off - (True, None, True, 1.0), - ], -) -def test_option_enable_tracing( - sentry_init, - enable_tracing, - traces_sample_rate, - tracing_enabled, - 
updated_traces_sample_rate, -): - sentry_init(enable_tracing=enable_tracing, traces_sample_rate=traces_sample_rate) - options = sentry_sdk.get_client().options - assert has_tracing_enabled(options) is tracing_enabled - assert options["traces_sample_rate"] == updated_traces_sample_rate - - def test_breadcrumb_arguments(sentry_init, capture_events): assert_hint = {"bar": 42} @@ -297,76 +266,6 @@ def before_breadcrumb(crumb, hint): add_breadcrumb(crumb=dict(foo=42)) -def test_push_scope(sentry_init, capture_events, suppress_deprecation_warnings): - sentry_init() - events = capture_events() - - with push_scope() as scope: - scope.level = "warning" - try: - 1 / 0 - except Exception as e: - capture_exception(e) - - (event,) = events - - assert event["level"] == "warning" - assert "exception" in event - - -def test_push_scope_null_client( - sentry_init, capture_events, suppress_deprecation_warnings -): - """ - This test can be removed when we remove push_scope and the Hub from the SDK. - """ - sentry_init() - events = capture_events() - - Hub.current.bind_client(None) - - with push_scope() as scope: - scope.level = "warning" - try: - 1 / 0 - except Exception as e: - capture_exception(e) - - assert len(events) == 0 - - -@pytest.mark.skip( - reason="This test is not valid anymore, because push_scope just returns the isolation scope. This test should be removed once the Hub is removed" -) -@pytest.mark.parametrize("null_client", (True, False)) -def test_push_scope_callback(sentry_init, null_client, capture_events): - """ - This test can be removed when we remove push_scope and the Hub from the SDK. - """ - sentry_init() - - if null_client: - Hub.current.bind_client(None) - - outer_scope = Hub.current.scope - - calls = [] - - @push_scope - def _(scope): - assert scope is Hub.current.scope - assert scope is not outer_scope - calls.append(1) - - # push_scope always needs to execute the callback regardless of - # client state, because that actually runs usercode in it, not - # just scope config code - assert calls == [1] - - # Assert scope gets popped correctly - assert Hub.current.scope is outer_scope - - def test_breadcrumbs(sentry_init, capture_events): sentry_init(max_breadcrumbs=10) events = capture_events() @@ -400,12 +299,12 @@ def test_breadcrumbs(sentry_init, capture_events): def test_breadcrumb_ordering(sentry_init, capture_events): sentry_init() events = capture_events() - now = datetime.datetime.now(datetime.timezone.utc).replace(microsecond=0) + now = datetime.now(timezone.utc).replace(microsecond=0) timestamps = [ - now - datetime.timedelta(days=10), - now - datetime.timedelta(days=8), - now - datetime.timedelta(days=12), + now - timedelta(days=10), + now - timedelta(days=8), + now - timedelta(days=12), ] for timestamp in timestamps: @@ -429,24 +328,24 @@ def test_breadcrumb_ordering(sentry_init, capture_events): def test_breadcrumb_ordering_different_types(sentry_init, capture_events): sentry_init() events = capture_events() - now = datetime.datetime.now(datetime.timezone.utc) + now = datetime.now(timezone.utc) timestamps = [ - now - datetime.timedelta(days=10), - now - datetime.timedelta(days=8), - now.replace(microsecond=0) - datetime.timedelta(days=12), - now - datetime.timedelta(days=9), - now - datetime.timedelta(days=13), - now.replace(microsecond=0) - datetime.timedelta(days=11), + now - timedelta(days=10), + now - timedelta(days=8), + now.replace(microsecond=0) - timedelta(days=12), + now - timedelta(days=9), + now - timedelta(days=13), + now.replace(microsecond=0) - 
timedelta(days=11), ] breadcrumb_timestamps = [ timestamps[0], timestamps[1].isoformat(), - datetime.datetime.strftime(timestamps[2], "%Y-%m-%dT%H:%M:%S") + "Z", - datetime.datetime.strftime(timestamps[3], "%Y-%m-%dT%H:%M:%S.%f") + "+00:00", - datetime.datetime.strftime(timestamps[4], "%Y-%m-%dT%H:%M:%S.%f") + "+0000", - datetime.datetime.strftime(timestamps[5], "%Y-%m-%dT%H:%M:%S.%f") + "-0000", + datetime.strftime(timestamps[2], "%Y-%m-%dT%H:%M:%S") + "Z", + datetime.strftime(timestamps[3], "%Y-%m-%dT%H:%M:%S.%f") + "+00:00", + datetime.strftime(timestamps[4], "%Y-%m-%dT%H:%M:%S.%f") + "+0000", + datetime.strftime(timestamps[5], "%Y-%m-%dT%H:%M:%S.%f") + "-0000", ] for i, timestamp in enumerate(timestamps): @@ -577,71 +476,6 @@ def test_integrations( } == expected_integrations -@pytest.mark.skip( - reason="This test is not valid anymore, because with the new Scopes calling bind_client on the Hub sets the client on the global scope. This test should be removed once the Hub is removed" -) -def test_client_initialized_within_scope(sentry_init, caplog): - """ - This test can be removed when we remove push_scope and the Hub from the SDK. - """ - caplog.set_level(logging.WARNING) - - sentry_init() - - with push_scope(): - Hub.current.bind_client(Client()) - - (record,) = (x for x in caplog.records if x.levelname == "WARNING") - - assert record.msg.startswith("init() called inside of pushed scope.") - - -@pytest.mark.skip( - reason="This test is not valid anymore, because with the new Scopes the push_scope just returns the isolation scope. This test should be removed once the Hub is removed" -) -def test_scope_leaks_cleaned_up(sentry_init, caplog): - """ - This test can be removed when we remove push_scope and the Hub from the SDK. - """ - caplog.set_level(logging.WARNING) - - sentry_init() - - old_stack = list(Hub.current._stack) - - with push_scope(): - push_scope() - - assert Hub.current._stack == old_stack - - (record,) = (x for x in caplog.records if x.levelname == "WARNING") - - assert record.message.startswith("Leaked 1 scopes:") - - -@pytest.mark.skip( - reason="This test is not valid anymore, because with the new Scopes there is not pushing and popping of scopes. This test should be removed once the Hub is removed" -) -def test_scope_popped_too_soon(sentry_init, caplog): - """ - This test can be removed when we remove push_scope and the Hub from the SDK. - """ - caplog.set_level(logging.ERROR) - - sentry_init() - - old_stack = list(Hub.current._stack) - - with push_scope(): - Hub.current.pop_scope_unsafe() - - assert Hub.current._stack == old_stack - - (record,) = (x for x in caplog.records if x.levelname == "ERROR") - - assert record.message == ("Scope popped too soon. 
Popped 1 scopes too many.") - - def test_scope_event_processor_order(sentry_init, capture_events): def before_send(event, hint): event["message"] += "baz" @@ -761,7 +595,7 @@ def foo(event, hint): capture_message("dropped") - with start_transaction(name="dropped"): + with start_span(name="dropped"): pass assert len(events) == 0 @@ -866,7 +700,7 @@ def test_functions_to_trace(sentry_init, capture_events): events = capture_events() - with start_transaction(name="something"): + with start_span(name="something"): time.sleep(0) for word in ["World", "You"]: @@ -902,7 +736,7 @@ def test_functions_to_trace_with_class(sentry_init, capture_events): events = capture_events() - with start_transaction(name="something"): + with start_span(name="something"): wg = WorldGreeter("World") wg.greet() wg.greet("You") @@ -947,7 +781,7 @@ def test_staticmethod_class_tracing(sentry_init, capture_events): events = capture_events() - with sentry_sdk.start_transaction(name="test"): + with sentry_sdk.start_span(name="test"): assert TracingTestClass.static(1) == 1 (event,) = events @@ -971,7 +805,7 @@ def test_staticmethod_instance_tracing(sentry_init, capture_events): events = capture_events() - with sentry_sdk.start_transaction(name="test"): + with sentry_sdk.start_span(name="test"): assert TracingTestClass().static(1) == 1 (event,) = events @@ -995,7 +829,7 @@ def test_classmethod_class_tracing(sentry_init, capture_events): events = capture_events() - with sentry_sdk.start_transaction(name="test"): + with sentry_sdk.start_span(name="test"): assert TracingTestClass.class_(1) == (TracingTestClass, 1) (event,) = events @@ -1019,7 +853,7 @@ def test_classmethod_instance_tracing(sentry_init, capture_events): events = capture_events() - with sentry_sdk.start_transaction(name="test"): + with sentry_sdk.start_span(name="test"): assert TracingTestClass().class_(1) == (TracingTestClass, 1) (event,) = events @@ -1031,7 +865,7 @@ def test_classmethod_instance_tracing(sentry_init, capture_events): def test_last_event_id(sentry_init): - sentry_init(enable_tracing=True) + sentry_init(traces_sample_rate=1.0) assert last_event_id() is None @@ -1041,42 +875,24 @@ def test_last_event_id(sentry_init): def test_last_event_id_transaction(sentry_init): - sentry_init(enable_tracing=True) + sentry_init(traces_sample_rate=1.0) assert last_event_id() is None - with start_transaction(name="test"): + with start_span(name="test"): pass assert last_event_id() is None, "Transaction should not set last_event_id" def test_last_event_id_scope(sentry_init): - sentry_init(enable_tracing=True) + sentry_init(traces_sample_rate=1.0) # Should not crash with isolation_scope() as scope: assert scope.last_event_id() is None -def test_hub_constructor_deprecation_warning(): - with pytest.warns(sentry_sdk.hub.SentryHubDeprecationWarning): - Hub() - - -def test_hub_current_deprecation_warning(): - with pytest.warns(sentry_sdk.hub.SentryHubDeprecationWarning) as warning_records: - Hub.current - - # Make sure we only issue one deprecation warning - assert len(warning_records) == 1 - - -def test_hub_main_deprecation_warnings(): - with pytest.warns(sentry_sdk.hub.SentryHubDeprecationWarning): - Hub.main - - @pytest.mark.skipif(sys.version_info < (3, 11), reason="add_note() not supported") def test_notes(sentry_init, capture_events): sentry_init() diff --git a/tests/test_breadcrumbs.py b/tests/test_breadcrumbs.py new file mode 100644 index 0000000000..391c24cfc7 --- /dev/null +++ b/tests/test_breadcrumbs.py @@ -0,0 +1,86 @@ +from unittest import mock + +import 
sentry_sdk
+
+
+def test_breadcrumbs(sentry_init, capture_events):
+    """
+    This test illustrates how breadcrumbs are added to the error event when an error occurs
+    """
+    sentry_init(
+        traces_sample_rate=1.0,
+    )
+    events = capture_events()
+
+    add_breadcrumbs_kwargs = {
+        "type": "navigation",
+        "category": "unit_tests.breadcrumbs",
+        "level": "fatal",
+        "origin": "unit-tests",
+        "data": {
+            "string": "foobar",
+            "number": 4.2,
+            "array": [1, 2, 3],
+            "dict": {"foo": "bar"},
+        },
+    }
+
+    with sentry_sdk.start_span(name="trx-breadcrumbs"):
+        sentry_sdk.add_breadcrumb(message="breadcrumb0", **add_breadcrumbs_kwargs)
+
+        with sentry_sdk.start_span(name="span1", op="function"):
+            sentry_sdk.add_breadcrumb(message="breadcrumb1", **add_breadcrumbs_kwargs)
+
+            with sentry_sdk.start_span(name="span2", op="function"):
+                sentry_sdk.add_breadcrumb(
+                    message="breadcrumb2", **add_breadcrumbs_kwargs
+                )
+
+            with sentry_sdk.start_span(name="span3", op="function"):
+                sentry_sdk.add_breadcrumb(
+                    message="breadcrumb3", **add_breadcrumbs_kwargs
+                )
+
+                try:
+                    1 / 0
+                except ZeroDivisionError as ex:
+                    sentry_sdk.capture_exception(ex)
+
+    assert len(events) == 2
+    error = events[0]
+
+    breadcrumbs = error["breadcrumbs"]["values"]
+    assert len(breadcrumbs) == 4
+
+    # Check the four custom breadcrumbs; the last one was added on span3,
+    # where the error was captured.
+    for i in range(4):
+        assert breadcrumbs[i]["message"] == f"breadcrumb{i}"
+        assert breadcrumbs[i]["type"] == "navigation"
+        assert breadcrumbs[i]["category"] == "unit_tests.breadcrumbs"
+        assert breadcrumbs[i]["level"] == "fatal"
+        assert breadcrumbs[i]["origin"] == "unit-tests"
+        assert breadcrumbs[i]["data"] == {
+            "string": "foobar",
+            "number": 4.2,
+            "array": [1, 2, 3],
+            "dict": {"foo": "bar"},
+        }
+        assert breadcrumbs[i]["timestamp"] == mock.ANY
diff --git a/tests/test_client.py b/tests/test_client.py
index 67f53d989a..9b0b4c3bdb 100644
--- a/tests/test_client.py
+++ b/tests/test_client.py
@@ -12,10 +12,8 @@
 import sentry_sdk
 from sentry_sdk import (
-    Hub,
     Client,
     add_breadcrumb,
-    configure_scope,
     capture_message,
     capture_exception,
     capture_event,
@@ -380,13 +378,6 @@ def test_socks_proxy(testcase, http2):
     )
 
 
-def test_simple_transport(sentry_init):
-    events = []
-    sentry_init(transport=events.append)
-    capture_message("Hello World!")
-    assert events[0]["message"] == "Hello World!"
- - def test_ignore_errors(sentry_init, capture_events): sentry_init(ignore_errors=[ZeroDivisionError]) events = capture_events() @@ -604,39 +595,12 @@ def capture_envelope(self, envelope): ) start = time.time() - output = subprocess.check_output([sys.executable, str(app)]) + subprocess.check_output([sys.executable, str(app)]) end = time.time() # Each message takes at least 0.1 seconds to process assert int(end - start) >= num_messages / 10 - assert output.count(b"HI") == num_messages - - -def test_configure_scope_available( - sentry_init, request, monkeypatch, suppress_deprecation_warnings -): - """ - Test that scope is configured if client is configured - - This test can be removed once configure_scope and the Hub are removed. - """ - sentry_init() - - with configure_scope() as scope: - assert scope is Hub.current.scope - scope.set_tag("foo", "bar") - - calls = [] - - def callback(scope): - calls.append(scope) - scope.set_tag("foo", "bar") - - assert configure_scope(callback) is None - assert len(calls) == 1 - assert calls[0] is Hub.current.scope - @pytest.mark.tests_internal_exceptions def test_client_debug_option_enabled(sentry_init, caplog): @@ -656,27 +620,6 @@ def test_client_debug_option_disabled(with_client, sentry_init, caplog): assert "OK" not in caplog.text -@pytest.mark.skip( - reason="New behavior in SDK 2.0: You have a scope before init and add data to it." -) -def test_scope_initialized_before_client(sentry_init, capture_events): - """ - This is a consequence of how configure_scope() works. We must - make `configure_scope()` a noop if no client is configured. Even - if the user later configures a client: We don't know that. - """ - with configure_scope() as scope: - scope.set_tag("foo", 42) - - sentry_init() - - events = capture_events() - capture_message("hi") - (event,) = events - - assert "tags" not in event - - def test_weird_chars(sentry_init, capture_events): sentry_init() events = capture_events() @@ -1490,9 +1433,3 @@ def run(self, sentry_init, capture_record_lost_event_calls): ) def test_dropped_transaction(sentry_init, capture_record_lost_event_calls, test_config): test_config.run(sentry_init, capture_record_lost_event_calls) - - -@pytest.mark.parametrize("enable_tracing", [True, False]) -def test_enable_tracing_deprecated(sentry_init, enable_tracing): - with pytest.warns(DeprecationWarning): - sentry_init(enable_tracing=enable_tracing) diff --git a/tests/test_dsc.py b/tests/test_dsc.py index 8e549d0cf8..ea3c0b8988 100644 --- a/tests/test_dsc.py +++ b/tests/test_dsc.py @@ -29,8 +29,8 @@ def test_dsc_head_of_trace(sentry_init, capture_envelopes): ) envelopes = capture_envelopes() - # We start a new transaction - with sentry_sdk.start_transaction(name="foo"): + # We start a new root_span + with sentry_sdk.start_span(name="foo"): pass assert len(envelopes) == 1 @@ -97,10 +97,10 @@ def test_dsc_continuation_of_trace(sentry_init, capture_envelopes): "HTTP_BAGGAGE": baggage, } - # We continue the incoming trace and start a new transaction - transaction = sentry_sdk.continue_trace(incoming_http_headers) - with sentry_sdk.start_transaction(transaction, name="foo"): - pass + # We continue the incoming trace and start a new root span + with sentry_sdk.continue_trace(incoming_http_headers): + with sentry_sdk.start_span(name="foo"): + pass assert len(envelopes) == 1 @@ -117,7 +117,7 @@ def test_dsc_continuation_of_trace(sentry_init, capture_envelopes): assert "sample_rate" in envelope_trace_header assert type(envelope_trace_header["sample_rate"]) == str - assert 
envelope_trace_header["sample_rate"] == "1.0"
+    assert envelope_trace_header["sample_rate"] == "0.01337"
 
     assert "sampled" in envelope_trace_header
     assert type(envelope_trace_header["sampled"]) == str
@@ -137,7 +137,7 @@ def test_dsc_continuation_of_trace(sentry_init, capture_envelopes):
 
 
 def test_dsc_continuation_of_trace_sample_rate_changed_in_traces_sampler(
-    sentry_init, capture_envelopes
+    sentry_init, capture_envelopes, monkeypatch
 ):
     """
     Another service calls our service and passes tracing information to us.
@@ -176,9 +176,9 @@ def my_traces_sampler(sampling_context):
 
     # We continue the incoming trace and start a new transaction
     with mock.patch("sentry_sdk.tracing_utils.Random.uniform", return_value=0.125):
-        transaction = sentry_sdk.continue_trace(incoming_http_headers)
-        with sentry_sdk.start_transaction(transaction, name="foo"):
-            pass
+        with sentry_sdk.continue_trace(incoming_http_headers):
+            with sentry_sdk.start_span(name="foo"):
+                pass
 
     assert len(envelopes) == 1
@@ -214,6 +214,213 @@ def my_traces_sampler(sampling_context):
     assert envelope_trace_header["transaction"] == "bar"
 
 
+@pytest.mark.parametrize(
+    "test_data, expected_sample_rate, expected_sampled",
+    [
+        # Test data:
+        # "incoming_sample_rate":
+        #   The "sentry-sample_rate" in the incoming `baggage` header.
+        # "incoming_sampled":
+        #   The "sentry-sampled" in the incoming `baggage` header.
+        # "sentry_trace_header_parent_sampled":
+        #   The number at the end of the `sentry-trace` header, called "parent_sampled".
+        # "use_local_traces_sampler":
+        #   Whether the local traces sampler is used.
+        # "local_traces_sampler_result":
+        #   The result of the local traces sampler.
+        # "local_traces_sample_rate":
+        #   The `traces_sample_rate` setting in the local `sentry_init` call.
+        (  # 1 traces_sample_rate does not override incoming
+            {
+                "incoming_sample_rate": 1.0,
+                "incoming_sampled": "true",
+                "sentry_trace_header_parent_sampled": 1,
+                "use_local_traces_sampler": False,
+                "local_traces_sampler_result": None,
+                "local_traces_sample_rate": 0.7,
+            },
+            1.0,  # expected_sample_rate
+            "true",  # expected_sampled
+        ),
+        (  # 2 traces_sampler overrides incoming
+            {
+                "incoming_sample_rate": 1.0,
+                "incoming_sampled": "true",
+                "sentry_trace_header_parent_sampled": 1,
+                "use_local_traces_sampler": True,
+                "local_traces_sampler_result": 0.5,
+                "local_traces_sample_rate": 0.7,
+            },
+            0.5,  # expected_sample_rate
+            "true",  # expected_sampled
+        ),
+        (  # 3 traces_sample_rate does not override incoming sample rate or parent (incoming not sampled)
+            {
+                "incoming_sample_rate": 1.0,
+                "incoming_sampled": "false",
+                "sentry_trace_header_parent_sampled": 0,
+                "use_local_traces_sampler": False,
+                "local_traces_sampler_result": None,
+                "local_traces_sample_rate": 0.7,
+            },
+            None,  # expected_sample_rate
+            "tracing-disabled-no-transactions-should-be-sent",  # expected_sampled (because the parent sampled is 0)
+        ),
+        (  # 4 traces_sampler overrides incoming (incoming not sampled)
+            {
+                "incoming_sample_rate": 0.3,
+                "incoming_sampled": "false",
+                "sentry_trace_header_parent_sampled": 0,
+                "use_local_traces_sampler": True,
+                "local_traces_sampler_result": 0.25,
+                "local_traces_sample_rate": 0.7,
+            },
+            0.25,  # expected_sample_rate
+            "false",  # expected_sampled (traces sampler can override parent sampled)
+        ),
+        (  # 5 forwarding incoming (traces_sample_rate not set)
+            {
+                "incoming_sample_rate": 1.0,
+                "incoming_sampled": "true",
+                "sentry_trace_header_parent_sampled": 1,
+                "use_local_traces_sampler": False,
+                "local_traces_sampler_result": None,
"local_traces_sample_rate": None, + }, + None, # expected_sample_rate + "tracing-disabled-no-transactions-should-be-sent", # expected_sampled (traces_sample_rate=None disables all transaction creation) + ), + ( # 6 traces_sampler overrides incoming (traces_sample_rate not set) + { + "incoming_sample_rate": 1.0, + "incoming_sampled": "true", + "sentry_trace_header_parent_sampled": 1, + "use_local_traces_sampler": True, + "local_traces_sampler_result": 0.5, + "local_traces_sample_rate": None, + }, + 0.5, # expected_sample_rate + "true", # expected_sampled (traces sampler overrides the traces_sample_rate setting, so transactions are created) + ), + ( # 7 forwarding incoming (traces_sample_rate not set) (incoming not sampled) + { + "incoming_sample_rate": 1.0, + "incoming_sampled": "false", + "sentry_trace_header_parent_sampled": 0, + "use_local_traces_sampler": False, + "local_traces_sampler_result": None, + "local_traces_sample_rate": None, + }, + None, # expected_sample_rate + "tracing-disabled-no-transactions-should-be-sent", # expected_sampled (traces_sample_rate=None disables all transaction creation) + ), + ( # 8 traces_sampler overrides incoming (traces_sample_rate not set) (incoming not sampled) + { + "incoming_sample_rate": 0.3, + "incoming_sampled": "false", + "sentry_trace_header_parent_sampled": 0, + "use_local_traces_sampler": True, + "local_traces_sampler_result": 0.25, + "local_traces_sample_rate": None, + }, + 0.25, # expected_sample_rate + "false", # expected_sampled + ), + ( # 9 traces_sample_rate overrides incoming (upstream deferred sampling decision) + { + "incoming_sample_rate": 1.0, + "incoming_sampled": None, + "sentry_trace_header_parent_sampled": None, + "use_local_traces_sampler": False, + "local_traces_sampler_result": 0.5, + "local_traces_sample_rate": 0.7, + }, + 0.7, # expected_sample_rate + "true", # expected_sampled + ), + ], + ids=( + "1 traces_sample_rate does not override incoming", + "2 traces_sampler overrides incoming", + "3 traces_sample_rate does not overrides incoming sample rate or parent (incoming not sampled)", + "4 traces_sampler overrides incoming (incoming not sampled)", + "5 forwarding incoming (traces_sample_rate not set)", + "6 traces_sampler overrides incoming (traces_sample_rate not set)", + "7 forwarding incoming (traces_sample_rate not set) (incoming not sampled)", + "8 traces_sampler overrides incoming (traces_sample_rate not set) (incoming not sampled)", + "9 traces_sample_rate overrides incoming (upstream deferred sampling decision)", + ), +) +def test_dsc_sample_rate_change( + sentry_init, + capture_envelopes, + test_data, + expected_sample_rate, + expected_sampled, +): + """ + Another service calls our service and passes tracing information to us. + Our service is continuing the trace, but modifies the sample rate. + The DSC in transaction envelopes should contain the updated sample rate. 
+ """ + + def my_traces_sampler(sampling_context): + return test_data["local_traces_sampler_result"] + + init_kwargs = { + "dsn": "https://mysecret@bla.ingest.sentry.io/12312012", + "release": "myapp@0.0.1", + "environment": "canary", + } + + if test_data["local_traces_sample_rate"]: + init_kwargs["traces_sample_rate"] = test_data["local_traces_sample_rate"] + + if test_data["use_local_traces_sampler"]: + init_kwargs["traces_sampler"] = my_traces_sampler + + sentry_init(**init_kwargs) + envelopes = capture_envelopes() + + # This is what the upstream service sends us + incoming_trace_id = "771a43a4192642f0b136d5159a501700" + if test_data["sentry_trace_header_parent_sampled"] is None: + sentry_trace = f"{incoming_trace_id}-1234567890abcdef" + else: + sentry_trace = f"{incoming_trace_id}-1234567890abcdef-{test_data['sentry_trace_header_parent_sampled']}" + + baggage = ( + f"sentry-trace_id={incoming_trace_id}, " + f"sentry-sample_rate={str(test_data['incoming_sample_rate'])}, " + f"sentry-sampled={test_data['incoming_sampled']}, " + "sentry-public_key=frontendpublickey, " + "sentry-release=myapp@0.0.1, " + "sentry-environment=prod, " + "sentry-transaction=foo, " + ) + incoming_http_headers = { + "HTTP_SENTRY_TRACE": sentry_trace, + "HTTP_BAGGAGE": baggage, + } + + # We continue the incoming trace and start a new transaction + with mock.patch("sentry_sdk.tracing_utils.Random.uniform", return_value=0.2): + with sentry_sdk.continue_trace(incoming_http_headers): + with sentry_sdk.start_span(name="foo"): + pass + + if expected_sampled == "tracing-disabled-no-transactions-should-be-sent": + assert len(envelopes) == 0 + else: + assert len(envelopes) == 1 + transaction_envelope = envelopes[0] + dsc_in_envelope_header = transaction_envelope.headers["trace"] + + assert dsc_in_envelope_header["sample_rate"] == str(expected_sample_rate) + assert dsc_in_envelope_header["sampled"] == str(expected_sampled).lower() + assert dsc_in_envelope_header["trace_id"] == incoming_trace_id + + def test_dsc_issue(sentry_init, capture_envelopes): """ Our service is a standalone service that does not have tracing enabled. Just uses Sentry for error reporting. @@ -225,7 +432,7 @@ def test_dsc_issue(sentry_init, capture_envelopes): ) envelopes = capture_envelopes() - # No transaction is started, just an error is captured + # No root span is started, just an error is captured try: 1 / 0 except ZeroDivisionError as exp: @@ -261,8 +468,8 @@ def test_dsc_issue(sentry_init, capture_envelopes): def test_dsc_issue_with_tracing(sentry_init, capture_envelopes): """ - Our service has tracing enabled and an error occurs in an transaction. - Envelopes containing errors also have the same DSC than the transaction envelopes. + Our service has tracing enabled and an error occurs in an root span. + Envelopes containing errors also have the same DSC than the root span envelopes. 
""" sentry_init( dsn="https://mysecret@bla.ingest.sentry.io/12312012", @@ -272,8 +479,8 @@ def test_dsc_issue_with_tracing(sentry_init, capture_envelopes): ) envelopes = capture_envelopes() - # We start a new transaction and an error occurs - with sentry_sdk.start_transaction(name="foo"): + # We start a new root span and an error occurs + with sentry_sdk.start_span(name="foo"): try: 1 / 0 except ZeroDivisionError as exp: @@ -319,7 +526,7 @@ def test_dsc_issue_with_tracing(sentry_init, capture_envelopes): "traces_sample_rate", [ 0, # no traces will be started, but if incoming traces will be continued (by our instrumentations, not happening in this test) - None, # no tracing at all. This service will never create transactions. + None, # no tracing at all. This service will never create root spans. ], ) def test_dsc_issue_twp(sentry_init, capture_envelopes, traces_sample_rate): @@ -358,14 +565,14 @@ def test_dsc_issue_twp(sentry_init, capture_envelopes, traces_sample_rate): } # We continue the trace (meaning: saving the incoming trace information on the scope) - # but in this test, we do not start a transaction. - sentry_sdk.continue_trace(incoming_http_headers) + # but in this test, we do not start a root span. + with sentry_sdk.continue_trace(incoming_http_headers): - # No transaction is started, just an error is captured - try: - 1 / 0 - except ZeroDivisionError as exp: - sentry_sdk.capture_exception(exp) + # No root span is started, just an error is captured + try: + 1 / 0 + except ZeroDivisionError as exp: + sentry_sdk.capture_exception(exp) assert len(envelopes) == 1 diff --git a/tests/test_exceptiongroup.py b/tests/test_exceptiongroup.py index 4c7afc58eb..01ec0a78d4 100644 --- a/tests/test_exceptiongroup.py +++ b/tests/test_exceptiongroup.py @@ -217,7 +217,10 @@ def test_exception_chain_cause(): { "mechanism": { "handled": False, - "type": "test_suite", + "type": "chained", + "exception_id": 1, + "parent_id": 0, + "source": "__cause__", }, "module": None, "type": "TypeError", @@ -227,6 +230,7 @@ def test_exception_chain_cause(): "mechanism": { "handled": False, "type": "test_suite", + "exception_id": 0, }, "module": None, "type": "ValueError", @@ -257,7 +261,10 @@ def test_exception_chain_context(): { "mechanism": { "handled": False, - "type": "test_suite", + "type": "chained", + "exception_id": 1, + "parent_id": 0, + "source": "__context__", }, "module": None, "type": "TypeError", @@ -267,6 +274,7 @@ def test_exception_chain_context(): "mechanism": { "handled": False, "type": "test_suite", + "exception_id": 0, }, "module": None, "type": "ValueError", @@ -297,6 +305,7 @@ def test_simple_exception(): "mechanism": { "handled": False, "type": "test_suite", + "exception_id": 0, }, "module": None, "type": "ValueError", diff --git a/tests/test_feature_flags.py b/tests/test_feature_flags.py index 1b0ed13d49..5c2f1cd352 100644 --- a/tests/test_feature_flags.py +++ b/tests/test_feature_flags.py @@ -259,3 +259,19 @@ def test_flag_limit(sentry_init, capture_events): } ) assert "flag.evaluation.10" not in event["spans"][0]["data"] + + +def test_flag_counter_not_sent(sentry_init, capture_events): + sentry_init(traces_sample_rate=1.0) + + events = capture_events() + + with start_transaction(name="hi"): + with start_span(op="foo", name="bar"): + add_feature_flag("0", True) + add_feature_flag("1", True) + add_feature_flag("2", True) + add_feature_flag("3", True) + + (event,) = events + assert "_flag.count" not in event["spans"][0]["data"] diff --git a/tests/test_logs.py b/tests/test_logs.py index 
49ffd31ec7..dfe2284fa6 100644 --- a/tests/test_logs.py +++ b/tests/test_logs.py @@ -236,15 +236,15 @@ def test_logs_message_params(sentry_init, capture_envelopes): @minimum_python_37 -def test_logs_tied_to_transactions(sentry_init, capture_envelopes): +def test_logs_tied_to_root_spans(sentry_init, capture_envelopes): """ - Log messages are also tied to transactions. + Log messages are also tied to root spans. """ sentry_init(_experiments={"enable_logs": True}) envelopes = capture_envelopes() - with sentry_sdk.start_transaction(name="test-transaction") as trx: - sentry_sdk.logger.warning("This is a log tied to a transaction") + with sentry_sdk.start_span(name="test-root-span") as trx: + sentry_sdk.logger.warning("This is a log tied to a root span") get_client().flush() logs = envelopes_to_logs(envelopes) @@ -259,9 +259,9 @@ def test_logs_tied_to_spans(sentry_init, capture_envelopes): sentry_init(_experiments={"enable_logs": True}) envelopes = capture_envelopes() - with sentry_sdk.start_transaction(name="test-transaction"): + with sentry_sdk.start_span(name="test-root-span"): with sentry_sdk.start_span(name="test-span") as span: - sentry_sdk.logger.warning("This is a log tied to a span") + sentry_sdk.logger.warning("This is a log tied to a child span") get_client().flush() logs = envelopes_to_logs(envelopes) @@ -333,7 +333,10 @@ def test_logging_errors(sentry_init, capture_envelopes): """ The python logger module should be able to log errors without erroring """ - sentry_init(_experiments={"enable_logs": True}) + sentry_init( + _experiments={"enable_logs": True}, + integrations=[LoggingIntegration(event_level="ERROR")], + ) envelopes = capture_envelopes() python_logger = logging.Logger("test-logger") diff --git a/tests/test_metrics.py b/tests/test_metrics.py deleted file mode 100644 index c02f075288..0000000000 --- a/tests/test_metrics.py +++ /dev/null @@ -1,971 +0,0 @@ -import sys -import time -import linecache -from unittest import mock - -import pytest - -import sentry_sdk -from sentry_sdk import metrics -from sentry_sdk.tracing import TransactionSource -from sentry_sdk.envelope import parse_json - -try: - import gevent -except ImportError: - gevent = None - - -minimum_python_37_with_gevent = pytest.mark.skipif( - gevent and sys.version_info < (3, 7), - reason="Require Python 3.7 or higher with gevent", -) - - -def parse_metrics(bytes): - rv = [] - for line in bytes.splitlines(): - pieces = line.decode("utf-8").split("|") - payload = pieces[0].split(":") - name = payload[0] - values = payload[1:] - ty = pieces[1] - ts = None - tags = {} - for piece in pieces[2:]: - if piece[0] == "#": - for pair in piece[1:].split(","): - k, v = pair.split(":", 1) - old = tags.get(k) - if old is not None: - if isinstance(old, list): - old.append(v) - else: - tags[k] = [old, v] - else: - tags[k] = v - elif piece[0] == "T": - ts = int(piece[1:]) - else: - raise ValueError("unknown piece %r" % (piece,)) - rv.append((ts, name, ty, values, tags)) - rv.sort(key=lambda x: (x[0], x[1], tuple(sorted(tags.items())))) - return rv - - -@minimum_python_37_with_gevent -@pytest.mark.forked -def test_increment(sentry_init, capture_envelopes, maybe_monkeypatched_threading): - sentry_init( - release="fun-release", - environment="not-fun-env", - _experiments={"enable_metrics": True, "metric_code_locations": True}, - ) - ts = time.time() - envelopes = capture_envelopes() - - metrics.increment("foobar", 1.0, tags={"foo": "bar", "blub": "blah"}, timestamp=ts) - # python specific alias - metrics.incr("foobar", 2.0, tags={"foo": 
"bar", "blub": "blah"}, timestamp=ts) - sentry_sdk.flush() - - (envelope,) = envelopes - statsd_item, meta_item = envelope.items - - assert statsd_item.headers["type"] == "statsd" - m = parse_metrics(statsd_item.payload.get_bytes()) - - assert len(m) == 1 - assert m[0][1] == "foobar@none" - assert m[0][2] == "c" - assert m[0][3] == ["3.0"] - assert m[0][4] == { - "blub": "blah", - "foo": "bar", - "release": "fun-release", - "environment": "not-fun-env", - } - - assert meta_item.headers["type"] == "metric_meta" - assert parse_json(meta_item.payload.get_bytes()) == { - "timestamp": mock.ANY, - "mapping": { - "c:foobar@none": [ - { - "type": "location", - "filename": "tests/test_metrics.py", - "abs_path": __file__, - "function": sys._getframe().f_code.co_name, - "module": __name__, - "lineno": mock.ANY, - "pre_context": mock.ANY, - "context_line": mock.ANY, - "post_context": mock.ANY, - } - ] - }, - } - - -@minimum_python_37_with_gevent -@pytest.mark.forked -def test_timing(sentry_init, capture_envelopes, maybe_monkeypatched_threading): - sentry_init( - release="fun-release@1.0.0", - environment="not-fun-env", - _experiments={"enable_metrics": True, "metric_code_locations": True}, - ) - ts = time.time() - envelopes = capture_envelopes() - - with metrics.timing("whatever", tags={"blub": "blah"}, timestamp=ts): - time.sleep(0.1) - sentry_sdk.flush() - - (envelope,) = envelopes - statsd_item, meta_item = envelope.items - - assert statsd_item.headers["type"] == "statsd" - m = parse_metrics(statsd_item.payload.get_bytes()) - - assert len(m) == 1 - assert m[0][1] == "whatever@second" - assert m[0][2] == "d" - assert len(m[0][3]) == 1 - assert float(m[0][3][0]) >= 0.1 - assert m[0][4] == { - "blub": "blah", - "release": "fun-release@1.0.0", - "environment": "not-fun-env", - } - - assert meta_item.headers["type"] == "metric_meta" - json = parse_json(meta_item.payload.get_bytes()) - assert json == { - "timestamp": mock.ANY, - "mapping": { - "d:whatever@second": [ - { - "type": "location", - "filename": "tests/test_metrics.py", - "abs_path": __file__, - "function": sys._getframe().f_code.co_name, - "module": __name__, - "lineno": mock.ANY, - "pre_context": mock.ANY, - "context_line": mock.ANY, - "post_context": mock.ANY, - } - ] - }, - } - - loc = json["mapping"]["d:whatever@second"][0] - line = linecache.getline(loc["abs_path"], loc["lineno"]) - assert ( - line.strip() - == 'with metrics.timing("whatever", tags={"blub": "blah"}, timestamp=ts):' - ) - - -@minimum_python_37_with_gevent -@pytest.mark.forked -def test_timing_decorator( - sentry_init, capture_envelopes, maybe_monkeypatched_threading -): - sentry_init( - release="fun-release@1.0.0", - environment="not-fun-env", - _experiments={"enable_metrics": True, "metric_code_locations": True}, - ) - envelopes = capture_envelopes() - - @metrics.timing("whatever-1", tags={"x": "y"}) - def amazing(): - time.sleep(0.1) - return 42 - - @metrics.timing("whatever-2", tags={"x": "y"}, unit="nanosecond") - def amazing_nano(): - time.sleep(0.01) - return 23 - - assert amazing() == 42 - assert amazing_nano() == 23 - sentry_sdk.flush() - - (envelope,) = envelopes - statsd_item, meta_item = envelope.items - - assert statsd_item.headers["type"] == "statsd" - m = parse_metrics(statsd_item.payload.get_bytes()) - - assert len(m) == 2 - assert m[0][1] == "whatever-1@second" - assert m[0][2] == "d" - assert len(m[0][3]) == 1 - assert float(m[0][3][0]) >= 0.1 - assert m[0][4] == { - "x": "y", - "release": "fun-release@1.0.0", - "environment": "not-fun-env", - } - - assert 
m[1][1] == "whatever-2@nanosecond" - assert m[1][2] == "d" - assert len(m[1][3]) == 1 - assert float(m[1][3][0]) >= 10000000.0 - assert m[1][4] == { - "x": "y", - "release": "fun-release@1.0.0", - "environment": "not-fun-env", - } - - assert meta_item.headers["type"] == "metric_meta" - json = parse_json(meta_item.payload.get_bytes()) - assert json == { - "timestamp": mock.ANY, - "mapping": { - "d:whatever-1@second": [ - { - "type": "location", - "filename": "tests/test_metrics.py", - "abs_path": __file__, - "function": sys._getframe().f_code.co_name, - "module": __name__, - "lineno": mock.ANY, - "pre_context": mock.ANY, - "context_line": mock.ANY, - "post_context": mock.ANY, - } - ], - "d:whatever-2@nanosecond": [ - { - "type": "location", - "filename": "tests/test_metrics.py", - "abs_path": __file__, - "function": sys._getframe().f_code.co_name, - "module": __name__, - "lineno": mock.ANY, - "pre_context": mock.ANY, - "context_line": mock.ANY, - "post_context": mock.ANY, - } - ], - }, - } - - # XXX: this is not the best location. It would probably be better to - # report the location in the function, however that is quite a bit - # trickier to do since we report from outside the function so we really - # only see the callsite. - loc = json["mapping"]["d:whatever-1@second"][0] - line = linecache.getline(loc["abs_path"], loc["lineno"]) - assert line.strip() == "assert amazing() == 42" - - -@minimum_python_37_with_gevent -@pytest.mark.forked -def test_timing_basic(sentry_init, capture_envelopes, maybe_monkeypatched_threading): - sentry_init( - release="fun-release@1.0.0", - environment="not-fun-env", - _experiments={"enable_metrics": True, "metric_code_locations": True}, - ) - ts = time.time() - envelopes = capture_envelopes() - - metrics.timing("timing", 1.0, tags={"a": "b"}, timestamp=ts) - metrics.timing("timing", 2.0, tags={"a": "b"}, timestamp=ts) - metrics.timing("timing", 2.0, tags={"a": "b"}, timestamp=ts) - metrics.timing("timing", 3.0, tags={"a": "b"}, timestamp=ts) - sentry_sdk.flush() - - (envelope,) = envelopes - statsd_item, meta_item = envelope.items - - assert statsd_item.headers["type"] == "statsd" - m = parse_metrics(statsd_item.payload.get_bytes()) - - assert len(m) == 1 - assert m[0][1] == "timing@second" - assert m[0][2] == "d" - assert len(m[0][3]) == 4 - assert sorted(map(float, m[0][3])) == [1.0, 2.0, 2.0, 3.0] - assert m[0][4] == { - "a": "b", - "release": "fun-release@1.0.0", - "environment": "not-fun-env", - } - - assert meta_item.headers["type"] == "metric_meta" - assert parse_json(meta_item.payload.get_bytes()) == { - "timestamp": mock.ANY, - "mapping": { - "d:timing@second": [ - { - "type": "location", - "filename": "tests/test_metrics.py", - "abs_path": __file__, - "function": sys._getframe().f_code.co_name, - "module": __name__, - "lineno": mock.ANY, - "pre_context": mock.ANY, - "context_line": mock.ANY, - "post_context": mock.ANY, - } - ] - }, - } - - -@minimum_python_37_with_gevent -@pytest.mark.forked -def test_distribution(sentry_init, capture_envelopes, maybe_monkeypatched_threading): - sentry_init( - release="fun-release@1.0.0", - environment="not-fun-env", - _experiments={"enable_metrics": True, "metric_code_locations": True}, - ) - ts = time.time() - envelopes = capture_envelopes() - - metrics.distribution("dist", 1.0, tags={"a": "b"}, timestamp=ts) - metrics.distribution("dist", 2.0, tags={"a": "b"}, timestamp=ts) - metrics.distribution("dist", 2.0, tags={"a": "b"}, timestamp=ts) - metrics.distribution("dist", 3.0, tags={"a": "b"}, timestamp=ts) - 
sentry_sdk.flush() - - (envelope,) = envelopes - statsd_item, meta_item = envelope.items - - assert statsd_item.headers["type"] == "statsd" - m = parse_metrics(statsd_item.payload.get_bytes()) - - assert len(m) == 1 - assert m[0][1] == "dist@none" - assert m[0][2] == "d" - assert len(m[0][3]) == 4 - assert sorted(map(float, m[0][3])) == [1.0, 2.0, 2.0, 3.0] - assert m[0][4] == { - "a": "b", - "release": "fun-release@1.0.0", - "environment": "not-fun-env", - } - - assert meta_item.headers["type"] == "metric_meta" - json = parse_json(meta_item.payload.get_bytes()) - assert json == { - "timestamp": mock.ANY, - "mapping": { - "d:dist@none": [ - { - "type": "location", - "filename": "tests/test_metrics.py", - "abs_path": __file__, - "function": sys._getframe().f_code.co_name, - "module": __name__, - "lineno": mock.ANY, - "pre_context": mock.ANY, - "context_line": mock.ANY, - "post_context": mock.ANY, - } - ] - }, - } - - loc = json["mapping"]["d:dist@none"][0] - line = linecache.getline(loc["abs_path"], loc["lineno"]) - assert ( - line.strip() - == 'metrics.distribution("dist", 1.0, tags={"a": "b"}, timestamp=ts)' - ) - - -@minimum_python_37_with_gevent -@pytest.mark.forked -def test_set(sentry_init, capture_envelopes, maybe_monkeypatched_threading): - sentry_init( - release="fun-release@1.0.0", - environment="not-fun-env", - _experiments={"enable_metrics": True, "metric_code_locations": True}, - ) - ts = time.time() - envelopes = capture_envelopes() - - metrics.set("my-set", "peter", tags={"magic": "puff"}, timestamp=ts) - metrics.set("my-set", "paul", tags={"magic": "puff"}, timestamp=ts) - metrics.set("my-set", "mary", tags={"magic": "puff"}, timestamp=ts) - sentry_sdk.flush() - - (envelope,) = envelopes - statsd_item, meta_item = envelope.items - - assert statsd_item.headers["type"] == "statsd" - m = parse_metrics(statsd_item.payload.get_bytes()) - - assert len(m) == 1 - assert m[0][1] == "my-set@none" - assert m[0][2] == "s" - assert len(m[0][3]) == 3 - assert sorted(map(int, m[0][3])) == [354582103, 2513273657, 3329318813] - assert m[0][4] == { - "magic": "puff", - "release": "fun-release@1.0.0", - "environment": "not-fun-env", - } - - assert meta_item.headers["type"] == "metric_meta" - assert parse_json(meta_item.payload.get_bytes()) == { - "timestamp": mock.ANY, - "mapping": { - "s:my-set@none": [ - { - "type": "location", - "filename": "tests/test_metrics.py", - "abs_path": __file__, - "function": sys._getframe().f_code.co_name, - "module": __name__, - "lineno": mock.ANY, - "pre_context": mock.ANY, - "context_line": mock.ANY, - "post_context": mock.ANY, - } - ] - }, - } - - -@minimum_python_37_with_gevent -@pytest.mark.forked -def test_gauge(sentry_init, capture_envelopes, maybe_monkeypatched_threading): - sentry_init( - release="fun-release@1.0.0", - environment="not-fun-env", - _experiments={"enable_metrics": True, "metric_code_locations": False}, - ) - ts = time.time() - envelopes = capture_envelopes() - - metrics.gauge("my-gauge", 10.0, tags={"x": "y"}, timestamp=ts) - metrics.gauge("my-gauge", 20.0, tags={"x": "y"}, timestamp=ts) - metrics.gauge("my-gauge", 30.0, tags={"x": "y"}, timestamp=ts) - sentry_sdk.flush() - - (envelope,) = envelopes - - assert len(envelope.items) == 1 - assert envelope.items[0].headers["type"] == "statsd" - m = parse_metrics(envelope.items[0].payload.get_bytes()) - - assert len(m) == 1 - assert m[0][1] == "my-gauge@none" - assert m[0][2] == "g" - assert len(m[0][3]) == 5 - assert list(map(float, m[0][3])) == [30.0, 10.0, 30.0, 60.0, 3.0] - assert m[0][4] 
== { - "x": "y", - "release": "fun-release@1.0.0", - "environment": "not-fun-env", - } - - -@minimum_python_37_with_gevent -@pytest.mark.forked -def test_multiple(sentry_init, capture_envelopes): - sentry_init( - release="fun-release@1.0.0", - environment="not-fun-env", - _experiments={"enable_metrics": True, "metric_code_locations": False}, - ) - ts = time.time() - envelopes = capture_envelopes() - - metrics.gauge("my-gauge", 10.0, tags={"x": "y"}, timestamp=ts) - metrics.gauge("my-gauge", 20.0, tags={"x": "y"}, timestamp=ts) - metrics.gauge("my-gauge", 30.0, tags={"x": "y"}, timestamp=ts) - for _ in range(10): - metrics.increment("counter-1", 1.0, timestamp=ts) - metrics.increment("counter-2", 1.0, timestamp=ts) - - sentry_sdk.flush() - - (envelope,) = envelopes - - assert len(envelope.items) == 1 - assert envelope.items[0].headers["type"] == "statsd" - m = parse_metrics(envelope.items[0].payload.get_bytes()) - - assert len(m) == 3 - - assert m[0][1] == "counter-1@none" - assert m[0][2] == "c" - assert list(map(float, m[0][3])) == [10.0] - assert m[0][4] == { - "release": "fun-release@1.0.0", - "environment": "not-fun-env", - } - - assert m[1][1] == "counter-2@none" - assert m[1][2] == "c" - assert list(map(float, m[1][3])) == [1.0] - assert m[1][4] == { - "release": "fun-release@1.0.0", - "environment": "not-fun-env", - } - - assert m[2][1] == "my-gauge@none" - assert m[2][2] == "g" - assert len(m[2][3]) == 5 - assert list(map(float, m[2][3])) == [30.0, 10.0, 30.0, 60.0, 3.0] - assert m[2][4] == { - "x": "y", - "release": "fun-release@1.0.0", - "environment": "not-fun-env", - } - - -@minimum_python_37_with_gevent -@pytest.mark.forked -def test_transaction_name( - sentry_init, capture_envelopes, maybe_monkeypatched_threading -): - sentry_init( - release="fun-release@1.0.0", - environment="not-fun-env", - _experiments={"enable_metrics": True, "metric_code_locations": False}, - ) - ts = time.time() - envelopes = capture_envelopes() - - sentry_sdk.get_current_scope().set_transaction_name( - "/user/{user_id}", source=TransactionSource.ROUTE - ) - metrics.distribution("dist", 1.0, tags={"a": "b"}, timestamp=ts) - metrics.distribution("dist", 2.0, tags={"a": "b"}, timestamp=ts) - metrics.distribution("dist", 2.0, tags={"a": "b"}, timestamp=ts) - metrics.distribution("dist", 3.0, tags={"a": "b"}, timestamp=ts) - - sentry_sdk.flush() - - (envelope,) = envelopes - - assert len(envelope.items) == 1 - assert envelope.items[0].headers["type"] == "statsd" - m = parse_metrics(envelope.items[0].payload.get_bytes()) - - assert len(m) == 1 - assert m[0][1] == "dist@none" - assert m[0][2] == "d" - assert len(m[0][3]) == 4 - assert sorted(map(float, m[0][3])) == [1.0, 2.0, 2.0, 3.0] - assert m[0][4] == { - "a": "b", - "transaction": "/user/{user_id}", - "release": "fun-release@1.0.0", - "environment": "not-fun-env", - } - - -@minimum_python_37_with_gevent -@pytest.mark.forked -def test_metric_summaries( - sentry_init, capture_envelopes, maybe_monkeypatched_threading -): - sentry_init( - release="fun-release@1.0.0", - environment="not-fun-env", - enable_tracing=True, - ) - ts = time.time() - envelopes = capture_envelopes() - - with sentry_sdk.start_transaction( - op="stuff", name="/foo", source=TransactionSource.ROUTE - ) as transaction: - metrics.increment("root-counter", timestamp=ts) - with metrics.timing("my-timer-metric", tags={"a": "b"}, timestamp=ts): - for x in range(10): - metrics.distribution("my-dist", float(x), timestamp=ts) - - sentry_sdk.flush() - - (transaction, envelope) = envelopes - - # 
Metrics Emission - assert envelope.items[0].headers["type"] == "statsd" - m = parse_metrics(envelope.items[0].payload.get_bytes()) - - assert len(m) == 3 - - assert m[0][1] == "my-dist@none" - assert m[0][2] == "d" - assert len(m[0][3]) == 10 - assert sorted(m[0][3]) == list(map(str, map(float, range(10)))) - assert m[0][4] == { - "transaction": "/foo", - "release": "fun-release@1.0.0", - "environment": "not-fun-env", - } - - assert m[1][1] == "my-timer-metric@second" - assert m[1][2] == "d" - assert len(m[1][3]) == 1 - assert m[1][4] == { - "a": "b", - "transaction": "/foo", - "release": "fun-release@1.0.0", - "environment": "not-fun-env", - } - - assert m[2][1] == "root-counter@none" - assert m[2][2] == "c" - assert m[2][3] == ["1.0"] - assert m[2][4] == { - "transaction": "/foo", - "release": "fun-release@1.0.0", - "environment": "not-fun-env", - } - - # Measurement Attachment - t = transaction.items[0].get_transaction_event() - - assert t["_metrics_summary"] == { - "c:root-counter@none": [ - { - "count": 1, - "min": 1.0, - "max": 1.0, - "sum": 1.0, - "tags": { - "transaction": "/foo", - "release": "fun-release@1.0.0", - "environment": "not-fun-env", - }, - } - ] - } - - assert t["spans"][0]["_metrics_summary"]["d:my-dist@none"] == [ - { - "count": 10, - "min": 0.0, - "max": 9.0, - "sum": 45.0, - "tags": { - "environment": "not-fun-env", - "release": "fun-release@1.0.0", - "transaction": "/foo", - }, - } - ] - - assert t["spans"][0]["tags"] == {"a": "b"} - (timer,) = t["spans"][0]["_metrics_summary"]["d:my-timer-metric@second"] - assert timer["count"] == 1 - assert timer["max"] == timer["min"] == timer["sum"] - assert timer["sum"] > 0 - assert timer["tags"] == { - "a": "b", - "environment": "not-fun-env", - "release": "fun-release@1.0.0", - "transaction": "/foo", - } - - -@minimum_python_37_with_gevent -@pytest.mark.forked -@pytest.mark.parametrize( - "metric_name,metric_unit,expected_name", - [ - ("first-metric", "nano-second", "first-metric@nanosecond"), - ("another_metric?", "nano second", "another_metric_@nanosecond"), - ( - "metric", - "nanosecond", - "metric@nanosecond", - ), - ( - "my.amaze.metric I guess", - "nano|\nsecond", - "my.amaze.metric_I_guess@nanosecond", - ), - ("métríc", "nanöseconď", "m_tr_c@nansecon"), - ], -) -def test_metric_name_normalization( - sentry_init, - capture_envelopes, - metric_name, - metric_unit, - expected_name, - maybe_monkeypatched_threading, -): - sentry_init( - _experiments={"enable_metrics": True, "metric_code_locations": False}, - ) - envelopes = capture_envelopes() - - metrics.distribution(metric_name, 1.0, unit=metric_unit) - - sentry_sdk.flush() - - (envelope,) = envelopes - - assert len(envelope.items) == 1 - assert envelope.items[0].headers["type"] == "statsd" - - parsed_metrics = parse_metrics(envelope.items[0].payload.get_bytes()) - assert len(parsed_metrics) == 1 - - name = parsed_metrics[0][1] - assert name == expected_name - - -@minimum_python_37_with_gevent -@pytest.mark.forked -@pytest.mark.parametrize( - "metric_tag,expected_tag", - [ - ({"f-oo|bar": "%$foo/"}, {"f-oobar": "%$foo/"}), - ({"foo$.$.$bar": "blah{}"}, {"foo..bar": "blah{}"}), - ( - {"foö-bar": "snöwmän"}, - {"fo-bar": "snöwmän"}, - ), - ({"route": "GET /foo"}, {"route": "GET /foo"}), - ({"__bar__": "this | or , that"}, {"__bar__": "this \\u{7c} or \\u{2c} that"}), - ({"foo/": "hello!\n\r\t\\"}, {"foo/": "hello!\\n\\r\\t\\\\"}), - ], -) -def test_metric_tag_normalization( - sentry_init, - capture_envelopes, - metric_tag, - expected_tag, - maybe_monkeypatched_threading, 
-): - sentry_init( - _experiments={"enable_metrics": True, "metric_code_locations": False}, - ) - envelopes = capture_envelopes() - - metrics.distribution("a", 1.0, tags=metric_tag) - - sentry_sdk.flush() - - (envelope,) = envelopes - - assert len(envelope.items) == 1 - assert envelope.items[0].headers["type"] == "statsd" - - parsed_metrics = parse_metrics(envelope.items[0].payload.get_bytes()) - assert len(parsed_metrics) == 1 - - tags = parsed_metrics[0][4] - - expected_tag_key, expected_tag_value = expected_tag.popitem() - assert expected_tag_key in tags - assert tags[expected_tag_key] == expected_tag_value - - -@minimum_python_37_with_gevent -@pytest.mark.forked -def test_before_emit_metric( - sentry_init, capture_envelopes, maybe_monkeypatched_threading -): - def before_emit(key, value, unit, tags): - if key == "removed-metric" or value == 47 or unit == "unsupported": - return False - - tags["extra"] = "foo" - del tags["release"] - # this better be a noop! - metrics.increment("shitty-recursion") - return True - - sentry_init( - release="fun-release@1.0.0", - environment="not-fun-env", - _experiments={ - "enable_metrics": True, - "metric_code_locations": False, - "before_emit_metric": before_emit, - }, - ) - envelopes = capture_envelopes() - - metrics.increment("removed-metric", 1.0) - metrics.increment("another-removed-metric", 47) - metrics.increment("yet-another-removed-metric", 1.0, unit="unsupported") - metrics.increment("actual-metric", 1.0) - sentry_sdk.flush() - - (envelope,) = envelopes - - assert len(envelope.items) == 1 - assert envelope.items[0].headers["type"] == "statsd" - m = parse_metrics(envelope.items[0].payload.get_bytes()) - - assert len(m) == 1 - assert m[0][1] == "actual-metric@none" - assert m[0][3] == ["1.0"] - assert m[0][4] == { - "extra": "foo", - "environment": "not-fun-env", - } - - -@minimum_python_37_with_gevent -@pytest.mark.forked -def test_aggregator_flush( - sentry_init, capture_envelopes, maybe_monkeypatched_threading -): - sentry_init( - release="fun-release@1.0.0", - environment="not-fun-env", - _experiments={ - "enable_metrics": True, - }, - ) - envelopes = capture_envelopes() - - metrics.increment("a-metric", 1.0) - sentry_sdk.flush() - - assert len(envelopes) == 1 - assert sentry_sdk.get_client().metrics_aggregator.buckets == {} - - -@minimum_python_37_with_gevent -@pytest.mark.forked -def test_tag_serialization( - sentry_init, capture_envelopes, maybe_monkeypatched_threading -): - sentry_init( - release="fun-release", - environment="not-fun-env", - _experiments={"enable_metrics": True, "metric_code_locations": False}, - ) - envelopes = capture_envelopes() - - metrics.increment( - "counter", - tags={ - "no-value": None, - "an-int": 42, - "a-float": 23.0, - "a-string": "blah", - "more-than-one": [1, "zwei", "3.0", None], - }, - ) - sentry_sdk.flush() - - (envelope,) = envelopes - - assert len(envelope.items) == 1 - assert envelope.items[0].headers["type"] == "statsd" - m = parse_metrics(envelope.items[0].payload.get_bytes()) - - assert len(m) == 1 - assert m[0][4] == { - "an-int": "42", - "a-float": "23.0", - "a-string": "blah", - "more-than-one": ["1", "3.0", "zwei"], - "release": "fun-release", - "environment": "not-fun-env", - } - - -@minimum_python_37_with_gevent -@pytest.mark.forked -def test_flush_recursion_protection( - sentry_init, capture_envelopes, monkeypatch, maybe_monkeypatched_threading -): - sentry_init( - release="fun-release", - environment="not-fun-env", - _experiments={"enable_metrics": True}, - ) - envelopes = 
capture_envelopes() - test_client = sentry_sdk.get_client() - - real_capture_envelope = test_client.transport.capture_envelope - - def bad_capture_envelope(*args, **kwargs): - metrics.increment("bad-metric") - return real_capture_envelope(*args, **kwargs) - - monkeypatch.setattr(test_client.transport, "capture_envelope", bad_capture_envelope) - - metrics.increment("counter") - - # flush twice to see the inner metric - sentry_sdk.flush() - sentry_sdk.flush() - - (envelope,) = envelopes - m = parse_metrics(envelope.items[0].payload.get_bytes()) - assert len(m) == 1 - assert m[0][1] == "counter@none" - - -@minimum_python_37_with_gevent -@pytest.mark.forked -def test_flush_recursion_protection_background_flush( - sentry_init, capture_envelopes, monkeypatch, maybe_monkeypatched_threading -): - monkeypatch.setattr(metrics.MetricsAggregator, "FLUSHER_SLEEP_TIME", 0.01) - sentry_init( - release="fun-release", - environment="not-fun-env", - _experiments={"enable_metrics": True}, - ) - envelopes = capture_envelopes() - test_client = sentry_sdk.get_client() - - real_capture_envelope = test_client.transport.capture_envelope - - def bad_capture_envelope(*args, **kwargs): - metrics.increment("bad-metric") - return real_capture_envelope(*args, **kwargs) - - monkeypatch.setattr(test_client.transport, "capture_envelope", bad_capture_envelope) - - metrics.increment("counter") - - # flush via sleep and flag - sentry_sdk.get_client().metrics_aggregator._force_flush = True - time.sleep(0.5) - - (envelope,) = envelopes - m = parse_metrics(envelope.items[0].payload.get_bytes()) - assert len(m) == 1 - assert m[0][1] == "counter@none" - - -@pytest.mark.skipif( - not gevent or sys.version_info >= (3, 7), - reason="Python 3.6 or lower and gevent required", -) -@pytest.mark.forked -def test_disable_metrics_for_old_python_with_gevent( - sentry_init, capture_envelopes, maybe_monkeypatched_threading -): - if maybe_monkeypatched_threading != "greenlet": - pytest.skip("Test specifically for gevent/greenlet") - - sentry_init( - release="fun-release", - environment="not-fun-env", - _experiments={"enable_metrics": True}, - ) - envelopes = capture_envelopes() - - metrics.incr("counter") - - sentry_sdk.flush() - - assert sentry_sdk.get_client().metrics_aggregator is None - assert not envelopes diff --git a/tests/test_monitor.py b/tests/test_monitor.py index b48d9f6282..42f9c1960c 100644 --- a/tests/test_monitor.py +++ b/tests/test_monitor.py @@ -54,14 +54,16 @@ def test_monitor_unhealthy(sentry_init): assert monitor.downsample_factor == (i + 1 if i < 10 else 10) -def test_transaction_uses_downsampled_rate( - sentry_init, capture_record_lost_event_calls, monkeypatch +def test_root_span_uses_downsample_rate( + sentry_init, capture_envelopes, capture_record_lost_event_calls, monkeypatch ): sentry_init( traces_sample_rate=1.0, transport=UnhealthyTestTransport(), ) + envelopes = capture_envelopes() + record_lost_event_calls = capture_record_lost_event_calls() monitor = sentry_sdk.get_client().monitor @@ -72,16 +74,33 @@ def test_transaction_uses_downsampled_rate( assert monitor.is_healthy() is False assert monitor.downsample_factor == 1 - # make sure we don't sample the transaction + # make sure we don't sample the root span with mock.patch("sentry_sdk.tracing_utils.Random.uniform", return_value=0.75): - with sentry_sdk.start_transaction(name="foobar") as transaction: - assert transaction.sampled is False - assert transaction.sample_rate == 0.5 + with sentry_sdk.start_span(name="foobar") as root_span: + with 
sentry_sdk.start_span(name="foospan"): + with sentry_sdk.start_span(name="foospan2"): + with sentry_sdk.start_span(name="foospan3"): + ... + + assert root_span.sampled is False + assert root_span.sample_rate == 0.5 + + assert len(envelopes) == 0 assert Counter(record_lost_event_calls) == Counter( [ - ("backpressure", "transaction", None, 1), - ("backpressure", "span", None, 1), + ( + "backpressure", + "transaction", + None, + 1, + ), + ( + "backpressure", + "span", + None, + 1, + ), # Only one span (the root span itself) is counted, since we did not record any spans in the first place. ] ) diff --git a/tests/test_propagationcontext.py b/tests/test_propagationcontext.py index a0ce1094fa..797a18cecd 100644 --- a/tests/test_propagationcontext.py +++ b/tests/test_propagationcontext.py @@ -3,7 +3,7 @@ import pytest -from sentry_sdk.tracing_utils import PropagationContext +from sentry_sdk.tracing_utils import Baggage, PropagationContext SAMPLED_FLAG = { @@ -29,23 +29,26 @@ def test_empty_context(): def test_context_with_values(): + baggage = Baggage( + sentry_items={ + "sentry-trace": "1234567890abcdef1234567890abcdef-1234567890abcdef-1" + }, + third_party_items={"foo": "bar"}, + mutable=False, + ) ctx = PropagationContext( trace_id="1234567890abcdef1234567890abcdef", span_id="1234567890abcdef", parent_span_id="abcdef1234567890", parent_sampled=True, - dynamic_sampling_context={ - "foo": "bar", - }, + baggage=baggage, ) assert ctx.trace_id == "1234567890abcdef1234567890abcdef" assert ctx.span_id == "1234567890abcdef" assert ctx.parent_span_id == "abcdef1234567890" assert ctx.parent_sampled - assert ctx.dynamic_sampling_context == { - "foo": "bar", - } + assert ctx.baggage == baggage def test_lazy_uuids(): @@ -101,11 +104,11 @@ def test_update(): def test_existing_sample_rand_kept(): ctx = PropagationContext( trace_id="00000000000000000000000000000000", - dynamic_sampling_context={"sample_rand": "0.5"}, + baggage=Baggage(sentry_items={"sample_rand": "0.5"}), ) - # If sample_rand was regenerated, the value would be 0.919221 based on the trace_id assert ctx.dynamic_sampling_context["sample_rand"] == "0.5" + assert ctx.baggage.sentry_items["sample_rand"] == "0.5" @pytest.mark.parametrize( @@ -155,7 +158,7 @@ def mock_random_class(seed): ) assert ( - ctx.dynamic_sampling_context["sample_rand"] + ctx.dynamic_sampling_context.get("sample_rand") == f"{expected_interval[0]:.6f}" # noqa: E231 ) assert mock_uniform.call_count == 1 diff --git a/tests/test_scope.py b/tests/test_scope.py index 9b16dc4344..507e76046c 100644 --- a/tests/test_scope.py +++ b/tests/test_scope.py @@ -11,12 +11,22 @@ ) from sentry_sdk.client import Client, NonRecordingClient from sentry_sdk.scope import ( - Scope, + Scope as BaseScope, ScopeType, - use_isolation_scope, - use_scope, should_send_default_pii, ) +from sentry_sdk.opentelemetry.scope import ( + PotelScope as Scope, + use_scope, + use_isolation_scope, + setup_scope_context_management, +) +from tests.conftest import ApproxDict + + +@pytest.fixture(autouse=True) +def setup_otel_scope_management(): + setup_scope_context_management() def test_copying(): @@ -230,7 +240,7 @@ def test_get_isolation_scope(): def test_get_global_scope(): scope = Scope.get_global_scope() assert scope is not None - assert scope.__class__ == Scope + assert scope.__class__ == BaseScope assert scope._type == ScopeType.GLOBAL @@ -797,7 +807,7 @@ def test_nested_scopes_with_tags(sentry_init, capture_envelopes): with sentry_sdk.new_scope() as scope2: scope2.set_tag("current_scope2", 1) - with 
sentry_sdk.start_transaction(name="trx") as trx: + with sentry_sdk.start_span(name="trx") as trx: trx.set_tag("trx", 1) with sentry_sdk.start_span(op="span1") as span1: @@ -813,8 +823,8 @@ def test_nested_scopes_with_tags(sentry_init, capture_envelopes): transaction = envelope.items[0].get_transaction_event() assert transaction["tags"] == {"isolation_scope1": 1, "current_scope2": 1, "trx": 1} - assert transaction["spans"][0]["tags"] == {"a": 1} - assert transaction["spans"][1]["tags"] == {"b": 1} + assert transaction["spans"][0]["tags"] == ApproxDict({"a": 1}) + assert transaction["spans"][1]["tags"] == ApproxDict({"b": 1}) def test_should_send_default_pii_true(sentry_init): @@ -874,7 +884,7 @@ def test_set_tags(): def test_last_event_id(sentry_init): - sentry_init(enable_tracing=True) + sentry_init(traces_sample_rate=1.0) assert Scope.last_event_id() is None @@ -884,18 +894,18 @@ def test_last_event_id(sentry_init): def test_last_event_id_transaction(sentry_init): - sentry_init(enable_tracing=True) + sentry_init(traces_sample_rate=1.0) assert Scope.last_event_id() is None - with sentry_sdk.start_transaction(name="test"): + with sentry_sdk.start_span(name="test"): pass assert Scope.last_event_id() is None, "Transaction should not set last_event_id" def test_last_event_id_cleared(sentry_init): - sentry_init(enable_tracing=True) + sentry_init(traces_sample_rate=1.0) # Make sure last_event_id is set sentry_sdk.capture_exception(Exception("test")) @@ -905,3 +915,18 @@ def test_last_event_id_cleared(sentry_init): Scope.get_isolation_scope().clear() assert Scope.last_event_id() is None, "last_event_id should be cleared" + + +def test_root_span(sentry_init): + sentry_init(traces_sample_rate=1.0) + + assert sentry_sdk.get_current_scope().root_span is None + + with sentry_sdk.start_span(name="test") as root_span: + assert sentry_sdk.get_current_scope().root_span == root_span + with sentry_sdk.start_span(name="child"): + assert sentry_sdk.get_current_scope().root_span == root_span + with sentry_sdk.start_span(name="grandchild"): + assert sentry_sdk.get_current_scope().root_span == root_span + + assert sentry_sdk.get_current_scope().root_span is None diff --git a/tests/test_scrubber.py b/tests/test_scrubber.py index 2cc5f4139f..cc99411778 100644 --- a/tests/test_scrubber.py +++ b/tests/test_scrubber.py @@ -1,7 +1,8 @@ import sys import logging -from sentry_sdk import capture_exception, capture_event, start_transaction, start_span +from sentry_sdk import capture_exception, capture_event, start_span +from sentry_sdk.integrations.logging import LoggingIntegration from sentry_sdk.utils import event_from_exception from sentry_sdk.scrubber import EventScrubber from tests.conftest import ApproxDict @@ -119,7 +120,10 @@ def test_stack_var_scrubbing(sentry_init, capture_events): def test_breadcrumb_extra_scrubbing(sentry_init, capture_events): - sentry_init(max_breadcrumbs=2) + sentry_init( + max_breadcrumbs=2, + integrations=[LoggingIntegration(event_level="ERROR")], + ) events = capture_events() logger.info("breadcrumb 1", extra=dict(foo=1, password="secret")) logger.info("breadcrumb 2", extra=dict(bar=2, auth="secret")) @@ -153,10 +157,10 @@ def test_span_data_scrubbing(sentry_init, capture_events): sentry_init(traces_sample_rate=1.0) events = capture_events() - with start_transaction(name="hi"): + with start_span(name="hi"): with start_span(op="foo", name="bar") as span: - span.set_data("password", "secret") - span.set_data("datafoo", "databar") + span.set_attribute("password", "secret") + 
span.set_attribute("datafoo", "databar") (event,) = events assert event["spans"][0]["data"] == ApproxDict( diff --git a/tests/test_sessions.py b/tests/test_sessions.py index 9cad0b7252..711c556e34 100644 --- a/tests/test_sessions.py +++ b/tests/test_sessions.py @@ -1,7 +1,7 @@ from unittest import mock import sentry_sdk -from sentry_sdk.sessions import auto_session_tracking, track_session +from sentry_sdk.sessions import track_session def sorted_aggregates(item): @@ -83,52 +83,13 @@ def test_aggregates(sentry_init, capture_envelopes): assert aggregates[0]["errored"] == 1 -def test_aggregates_deprecated( - sentry_init, capture_envelopes, suppress_deprecation_warnings -): - sentry_init( - release="fun-release", - environment="not-fun-env", - ) - envelopes = capture_envelopes() - - with auto_session_tracking(session_mode="request"): - with sentry_sdk.new_scope() as scope: - try: - scope.set_user({"id": "42"}) - raise Exception("all is wrong") - except Exception: - sentry_sdk.capture_exception() - - with auto_session_tracking(session_mode="request"): - pass - - sentry_sdk.get_isolation_scope().start_session(session_mode="request") - sentry_sdk.get_isolation_scope().end_session() - sentry_sdk.flush() - - assert len(envelopes) == 2 - assert envelopes[0].get_event() is not None - - sess = envelopes[1] - assert len(sess.items) == 1 - sess_event = sess.items[0].payload.json - assert sess_event["attrs"] == { - "release": "fun-release", - "environment": "not-fun-env", - } - - aggregates = sorted_aggregates(sess_event) - assert len(aggregates) == 1 - assert aggregates[0]["exited"] == 2 - assert aggregates[0]["errored"] == 1 - - def test_aggregates_explicitly_disabled_session_tracking_request_mode( sentry_init, capture_envelopes ): sentry_init( - release="fun-release", environment="not-fun-env", auto_session_tracking=False + release="fun-release", + environment="not-fun-env", + auto_session_tracking=False, ) envelopes = capture_envelopes() @@ -157,38 +118,6 @@ def test_aggregates_explicitly_disabled_session_tracking_request_mode( assert "errored" not in aggregates[0] -def test_aggregates_explicitly_disabled_session_tracking_request_mode_deprecated( - sentry_init, capture_envelopes, suppress_deprecation_warnings -): - sentry_init( - release="fun-release", environment="not-fun-env", auto_session_tracking=False - ) - envelopes = capture_envelopes() - - with auto_session_tracking(session_mode="request"): - with sentry_sdk.new_scope(): - try: - raise Exception("all is wrong") - except Exception: - sentry_sdk.capture_exception() - - with auto_session_tracking(session_mode="request"): - pass - - sentry_sdk.get_isolation_scope().start_session(session_mode="request") - sentry_sdk.get_isolation_scope().end_session() - sentry_sdk.flush() - - sess = envelopes[1] - assert len(sess.items) == 1 - sess_event = sess.items[0].payload.json - - aggregates = sorted_aggregates(sess_event) - assert len(aggregates) == 1 - assert aggregates[0]["exited"] == 1 - assert "errored" not in aggregates[0] - - def test_no_thread_on_shutdown_no_errors(sentry_init): sentry_init( release="fun-release", @@ -216,33 +145,3 @@ def test_no_thread_on_shutdown_no_errors(sentry_init): sentry_sdk.flush() # If we reach this point without error, the test is successful. 
- - -def test_no_thread_on_shutdown_no_errors_deprecated( - sentry_init, suppress_deprecation_warnings -): - sentry_init( - release="fun-release", - environment="not-fun-env", - ) - - # make it seem like the interpreter is shutting down - with mock.patch( - "threading.Thread.start", - side_effect=RuntimeError("can't create new thread at interpreter shutdown"), - ): - with auto_session_tracking(session_mode="request"): - with sentry_sdk.new_scope(): - try: - raise Exception("all is wrong") - except Exception: - sentry_sdk.capture_exception() - - with auto_session_tracking(session_mode="request"): - pass - - sentry_sdk.get_isolation_scope().start_session(session_mode="request") - sentry_sdk.get_isolation_scope().end_session() - sentry_sdk.flush() - - # If we reach this point without error, the test is successful. diff --git a/tests/test_transport.py b/tests/test_transport.py index 6eb7cdf829..dccb91804e 100644 --- a/tests/test_transport.py +++ b/tests/test_transport.py @@ -31,14 +31,12 @@ capture_message, isolation_scope, get_isolation_scope, - Hub, ) -from sentry_sdk._compat import PY37, PY38 -from sentry_sdk.envelope import Envelope, Item, parse_json +from sentry_sdk.envelope import Envelope, parse_json +from sentry_sdk._compat import PY38 from sentry_sdk.transport import ( KEEP_ALIVE_SOCKET_OPTIONS, _parse_rate_limits, - HttpTransport, ) from sentry_sdk.integrations.logging import LoggingIntegration, ignore_logger @@ -134,14 +132,7 @@ def mock_transaction_envelope(span_count): @pytest.mark.parametrize("client_flush_method", ["close", "flush"]) @pytest.mark.parametrize("use_pickle", (True, False)) @pytest.mark.parametrize("compression_level", (0, 9, None)) -@pytest.mark.parametrize( - "compression_algo", - ( - ("gzip", "br", "", None) - if PY37 or gevent is None - else ("gzip", "", None) - ), -) +@pytest.mark.parametrize("compression_algo", ("gzip", "br", "", None)) @pytest.mark.parametrize("http2", [True, False] if PY38 else [False]) def test_transport_works( capturing_server, @@ -650,135 +641,6 @@ def test_complex_limits_without_data_category( assert len(capturing_server.captured) == 0 -@pytest.mark.parametrize("response_code", [200, 429]) -def test_metric_bucket_limits(capturing_server, response_code, make_client): - client = make_client() - capturing_server.respond_with( - code=response_code, - headers={ - "X-Sentry-Rate-Limits": "4711:metric_bucket:organization:quota_exceeded:custom" - }, - ) - - envelope = Envelope() - envelope.add_item(Item(payload=b"{}", type="statsd")) - client.transport.capture_envelope(envelope) - client.flush() - - assert len(capturing_server.captured) == 1 - assert capturing_server.captured[0].path == "/api/132/envelope/" - capturing_server.clear_captured() - - assert set(client.transport._disabled_until) == set(["metric_bucket"]) - - client.transport.capture_envelope(envelope) - client.capture_event({"type": "transaction"}) - client.flush() - - assert len(capturing_server.captured) == 2 - - envelope = capturing_server.captured[0].envelope - assert envelope.items[0].type == "transaction" - envelope = capturing_server.captured[1].envelope - assert envelope.items[0].type == "client_report" - report = parse_json(envelope.items[0].get_bytes()) - assert report["discarded_events"] == [ - {"category": "metric_bucket", "reason": "ratelimit_backoff", "quantity": 1}, - ] - - -@pytest.mark.parametrize("response_code", [200, 429]) -def test_metric_bucket_limits_with_namespace( - capturing_server, response_code, make_client -): - client = make_client() - 
capturing_server.respond_with( - code=response_code, - headers={ - "X-Sentry-Rate-Limits": "4711:metric_bucket:organization:quota_exceeded:foo" - }, - ) - - envelope = Envelope() - envelope.add_item(Item(payload=b"{}", type="statsd")) - client.transport.capture_envelope(envelope) - client.flush() - - assert len(capturing_server.captured) == 1 - assert capturing_server.captured[0].path == "/api/132/envelope/" - capturing_server.clear_captured() - - assert set(client.transport._disabled_until) == set([]) - - client.transport.capture_envelope(envelope) - client.capture_event({"type": "transaction"}) - client.flush() - - assert len(capturing_server.captured) == 2 - - envelope = capturing_server.captured[0].envelope - assert envelope.items[0].type == "statsd" - envelope = capturing_server.captured[1].envelope - assert envelope.items[0].type == "transaction" - - -@pytest.mark.parametrize("response_code", [200, 429]) -def test_metric_bucket_limits_with_all_namespaces( - capturing_server, response_code, make_client -): - client = make_client() - capturing_server.respond_with( - code=response_code, - headers={ - "X-Sentry-Rate-Limits": "4711:metric_bucket:organization:quota_exceeded" - }, - ) - - envelope = Envelope() - envelope.add_item(Item(payload=b"{}", type="statsd")) - client.transport.capture_envelope(envelope) - client.flush() - - assert len(capturing_server.captured) == 1 - assert capturing_server.captured[0].path == "/api/132/envelope/" - capturing_server.clear_captured() - - assert set(client.transport._disabled_until) == set(["metric_bucket"]) - - client.transport.capture_envelope(envelope) - client.capture_event({"type": "transaction"}) - client.flush() - - assert len(capturing_server.captured) == 2 - - envelope = capturing_server.captured[0].envelope - assert envelope.items[0].type == "transaction" - envelope = capturing_server.captured[1].envelope - assert envelope.items[0].type == "client_report" - report = parse_json(envelope.items[0].get_bytes()) - assert report["discarded_events"] == [ - {"category": "metric_bucket", "reason": "ratelimit_backoff", "quantity": 1}, - ] - - -def test_hub_cls_backwards_compat(): - class TestCustomHubClass(Hub): - pass - - transport = HttpTransport( - defaultdict(lambda: None, {"dsn": "https://123abc@example.com/123"}) - ) - - with pytest.deprecated_call(): - assert transport.hub_cls is Hub - - with pytest.deprecated_call(): - transport.hub_cls = TestCustomHubClass - - with pytest.deprecated_call(): - assert transport.hub_cls is TestCustomHubClass - - @pytest.mark.parametrize("quantity", (1, 2, 10)) def test_record_lost_event_quantity(capturing_server, make_client, quantity): client = make_client() diff --git a/tests/test_utils.py b/tests/test_utils.py index b731c3e3ab..3ac826141b 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -32,6 +32,7 @@ _get_installed_modules, _generate_installed_modules, ensure_integration_enabled, + _serialize_span_attribute, ) @@ -61,55 +62,6 @@ def _normalize_distribution_name(name): return re.sub(r"[-_.]+", "-", name).lower() -@pytest.mark.parametrize( - ("input_str", "expected_output"), - ( - ( - "2021-01-01T00:00:00.000000Z", - datetime(2021, 1, 1, tzinfo=timezone.utc), - ), # UTC time - ( - "2021-01-01T00:00:00.000000", - datetime(2021, 1, 1).astimezone(timezone.utc), - ), # No TZ -- assume local but convert to UTC - ( - "2021-01-01T00:00:00Z", - datetime(2021, 1, 1, tzinfo=timezone.utc), - ), # UTC - No milliseconds - ( - "2021-01-01T00:00:00.000000+00:00", - datetime(2021, 1, 1, tzinfo=timezone.utc), - ), 
- ( - "2021-01-01T00:00:00.000000-00:00", - datetime(2021, 1, 1, tzinfo=timezone.utc), - ), - ( - "2021-01-01T00:00:00.000000+0000", - datetime(2021, 1, 1, tzinfo=timezone.utc), - ), - ( - "2021-01-01T00:00:00.000000-0000", - datetime(2021, 1, 1, tzinfo=timezone.utc), - ), - ( - "2020-12-31T00:00:00.000000+02:00", - datetime(2020, 12, 31, tzinfo=timezone(timedelta(hours=2))), - ), # UTC+2 time - ( - "2020-12-31T00:00:00.000000-0200", - datetime(2020, 12, 31, tzinfo=timezone(timedelta(hours=-2))), - ), # UTC-2 time - ( - "2020-12-31T00:00:00-0200", - datetime(2020, 12, 31, tzinfo=timezone(timedelta(hours=-2))), - ), # UTC-2 time - no milliseconds - ), -) -def test_datetime_from_isoformat(input_str, expected_output): - assert datetime_from_isoformat(input_str) == expected_output, input_str - - @pytest.mark.parametrize( "env_var_value,strict,expected", [ @@ -955,6 +907,86 @@ def test_format_timestamp_naive(): assert re.fullmatch(timestamp_regex, format_timestamp(datetime_object)) +class NoStr: + def __str__(self): + 1 / 0 + + +@pytest.mark.parametrize( + ("value", "result"), + ( + ("meow", "meow"), + (1, 1), + (47.0, 47.0), + (True, True), + (["meow", "bark"], ["meow", "bark"]), + ([True, False], [True, False]), + ([1, 2, 3], [1, 2, 3]), + ([46.5, 47.0, 47.5], [46.5, 47.0, 47.5]), + (["meow", 47], '["meow", 47]'), # mixed types not allowed in a list + (None, "null"), + ( + {"cat": "meow", "dog": ["bark", "woof"]}, + '{"cat": "meow", "dog": ["bark", "woof"]}', + ), + (datetime(2024, 1, 1), "2024-01-01 00:00:00"), + (("meow", "purr"), ["meow", "purr"]), + (NoStr(), None), + ), +) +def test_serialize_span_attribute(value, result): + assert _serialize_span_attribute(value) == result + + +@pytest.mark.parametrize( + ("input_str", "expected_output"), + ( + ( + "2021-01-01T00:00:00.000000Z", + datetime(2021, 1, 1, tzinfo=timezone.utc), + ), # UTC time + ( + "2021-01-01T00:00:00.000000", + datetime(2021, 1, 1, tzinfo=datetime.now().astimezone().tzinfo), + ), # No TZ -- assume UTC + ( + "2021-01-01T00:00:00Z", + datetime(2021, 1, 1, tzinfo=timezone.utc), + ), # UTC - No milliseconds + ( + "2021-01-01T00:00:00.000000+00:00", + datetime(2021, 1, 1, tzinfo=timezone.utc), + ), + ( + "2021-01-01T00:00:00.000000-00:00", + datetime(2021, 1, 1, tzinfo=timezone.utc), + ), + ( + "2021-01-01T00:00:00.000000+0000", + datetime(2021, 1, 1, tzinfo=timezone.utc), + ), + ( + "2021-01-01T00:00:00.000000-0000", + datetime(2021, 1, 1, tzinfo=timezone.utc), + ), + ( + "2020-12-31T00:00:00.000000+02:00", + datetime(2020, 12, 31, tzinfo=timezone(timedelta(hours=2))), + ), # UTC+2 time + ( + "2020-12-31T00:00:00.000000-0200", + datetime(2020, 12, 31, tzinfo=timezone(timedelta(hours=-2))), + ), # UTC-2 time + ( + "2020-12-31T00:00:00-0200", + datetime(2020, 12, 31, tzinfo=timezone(timedelta(hours=-2))), + ), # UTC-2 time - no milliseconds + ), +) +def test_datetime_from_isoformat(input_str, expected_output): + assert datetime_from_isoformat(input_str) == expected_output, input_str + + def test_qualname_from_function_inner_function(): def test_function(): ... 
diff --git a/tests/tracing/test_decorator.py b/tests/tracing/test_decorator.py index 18a66bd43e..ed6dedb26f 100644 --- a/tests/tracing/test_decorator.py +++ b/tests/tracing/test_decorator.py @@ -31,6 +31,7 @@ def test_trace_decorator(): assert result2 == "return_of_sync_function" +@pytest.mark.forked def test_trace_decorator_no_trx(): with patch_start_tracing_child(fake_transaction_is_none=True): with mock.patch.object(logger, "debug", mock.Mock()) as fake_debug: diff --git a/tests/tracing/test_deprecated.py b/tests/tracing/test_deprecated.py deleted file mode 100644 index fb58e43ebf..0000000000 --- a/tests/tracing/test_deprecated.py +++ /dev/null @@ -1,59 +0,0 @@ -import warnings - -import pytest - -import sentry_sdk -import sentry_sdk.tracing -from sentry_sdk import start_span - -from sentry_sdk.tracing import Span - - -@pytest.mark.skip(reason="This deprecated feature has been removed in SDK 2.0.") -def test_start_span_to_start_transaction(sentry_init, capture_events): - # XXX: this only exists for backwards compatibility with code before - # Transaction / start_transaction were introduced. - sentry_init(traces_sample_rate=1.0) - events = capture_events() - - with start_span(transaction="/1/"): - pass - - with start_span(Span(transaction="/2/")): - pass - - assert len(events) == 2 - assert events[0]["transaction"] == "/1/" - assert events[1]["transaction"] == "/2/" - - -@pytest.mark.parametrize( - "parameter_value_getter", - # Use lambda to avoid Hub deprecation warning here (will suppress it in the test) - (lambda: sentry_sdk.Hub(), lambda: sentry_sdk.Scope()), -) -def test_passing_hub_parameter_to_transaction_finish( - suppress_deprecation_warnings, parameter_value_getter -): - parameter_value = parameter_value_getter() - transaction = sentry_sdk.tracing.Transaction() - with pytest.warns(DeprecationWarning): - transaction.finish(hub=parameter_value) - - -def test_passing_hub_object_to_scope_transaction_finish(suppress_deprecation_warnings): - transaction = sentry_sdk.tracing.Transaction() - - # Do not move the following line under the `with` statement. Otherwise, the Hub.__init__ deprecation - # warning will be confused with the transaction.finish deprecation warning that we are testing. 
- hub = sentry_sdk.Hub() - - with pytest.warns(DeprecationWarning): - transaction.finish(hub) - - -def test_no_warnings_scope_to_transaction_finish(): - transaction = sentry_sdk.tracing.Transaction() - with warnings.catch_warnings(): - warnings.simplefilter("error") - transaction.finish(sentry_sdk.Scope()) diff --git a/tests/tracing/test_http_headers.py b/tests/tracing/test_http_headers.py deleted file mode 100644 index 6a8467101e..0000000000 --- a/tests/tracing/test_http_headers.py +++ /dev/null @@ -1,56 +0,0 @@ -from unittest import mock - -import pytest - -from sentry_sdk.tracing import Transaction -from sentry_sdk.tracing_utils import extract_sentrytrace_data - - -@pytest.mark.parametrize("sampled", [True, False, None]) -def test_to_traceparent(sampled): - transaction = Transaction( - name="/interactions/other-dogs/new-dog", - op="greeting.sniff", - trace_id="12312012123120121231201212312012", - sampled=sampled, - ) - - traceparent = transaction.to_traceparent() - - parts = traceparent.split("-") - assert parts[0] == "12312012123120121231201212312012" # trace_id - assert parts[1] == transaction.span_id # parent_span_id - if sampled is None: - assert len(parts) == 2 - else: - assert parts[2] == "1" if sampled is True else "0" # sampled - - -@pytest.mark.parametrize("sampling_decision", [True, False]) -def test_sentrytrace_extraction(sampling_decision): - sentrytrace_header = "12312012123120121231201212312012-0415201309082013-{}".format( - 1 if sampling_decision is True else 0 - ) - assert extract_sentrytrace_data(sentrytrace_header) == { - "trace_id": "12312012123120121231201212312012", - "parent_span_id": "0415201309082013", - "parent_sampled": sampling_decision, - } - - -def test_iter_headers(monkeypatch): - monkeypatch.setattr( - Transaction, - "to_traceparent", - mock.Mock(return_value="12312012123120121231201212312012-0415201309082013-0"), - ) - - transaction = Transaction( - name="/interactions/other-dogs/new-dog", - op="greeting.sniff", - ) - - headers = dict(transaction.iter_headers()) - assert ( - headers["sentry-trace"] == "12312012123120121231201212312012-0415201309082013-0" - ) diff --git a/tests/tracing/test_integration_tests.py b/tests/tracing/test_integration_tests.py index 61ef14b7d0..df6cf57e29 100644 --- a/tests/tracing/test_integration_tests.py +++ b/tests/tracing/test_integration_tests.py @@ -1,7 +1,5 @@ -import gc import re import sys -import weakref from unittest import mock import pytest @@ -9,12 +7,12 @@ import sentry_sdk from sentry_sdk import ( capture_message, + continue_trace, start_span, - start_transaction, ) from sentry_sdk.consts import SPANSTATUS from sentry_sdk.transport import Transport -from sentry_sdk.tracing import Transaction +from tests.conftest import SortedBaggage @pytest.mark.parametrize("sample_rate", [0.0, 1.0]) @@ -22,8 +20,8 @@ def test_basic(sentry_init, capture_events, sample_rate): sentry_init(traces_sample_rate=sample_rate) events = capture_events() - with start_transaction(name="hi") as transaction: - transaction.set_status(SPANSTATUS.OK) + with start_span(name="hi") as root_span: + root_span.set_status(SPANSTATUS.OK) with pytest.raises(ZeroDivisionError): with start_span(op="foo", name="foodesc"): 1 / 0 @@ -40,24 +38,21 @@ def test_basic(sentry_init, capture_events, sample_rate): span1, span2 = event["spans"] parent_span = event - assert span1["tags"]["status"] == "internal_error" + assert span1["status"] == "internal_error" assert span1["op"] == "foo" assert span1["description"] == "foodesc" - assert "status" not in span2.get("tags", 
{}) + assert span2["status"] == "ok" assert span2["op"] == "bar" assert span2["description"] == "bardesc" assert parent_span["transaction"] == "hi" - assert "status" not in event["tags"] + assert "status" not in event.get("tags", {}) assert event["contexts"]["trace"]["status"] == "ok" else: assert not events -@pytest.mark.parametrize("parent_sampled", [True, False, None]) @pytest.mark.parametrize("sample_rate", [0.0, 1.0]) -def test_continue_from_headers( - sentry_init, capture_envelopes, parent_sampled, sample_rate -): +def test_continue_trace(sentry_init, capture_envelopes, sample_rate): # noqa:N803 """ Ensure data is actually passed along via headers, and that they are read correctly. @@ -66,55 +61,41 @@ def test_continue_from_headers( envelopes = capture_envelopes() # make a parent transaction (normally this would be in a different service) - with start_transaction(name="hi", sampled=True if sample_rate == 0 else None): - with start_span() as old_span: - old_span.sampled = parent_sampled - headers = dict( - sentry_sdk.get_current_scope().iter_trace_propagation_headers(old_span) - ) - headers["baggage"] = ( - "other-vendor-value-1=foo;bar;baz, " - "sentry-trace_id=771a43a4192642f0b136d5159a501700, " - "sentry-public_key=49d0f7386ad645858ae85020e393bef3, " - "sentry-sample_rate=0.01337, sentry-user_id=Amelie, " - "other-vendor-value-2=foo;bar;" - ) + with start_span(name="hi"): + with start_span(name="inner") as old_span: + headers = dict(old_span.iter_headers()) + assert headers["sentry-trace"] + assert headers["baggage"] # child transaction, to prove that we can read 'sentry-trace' header data correctly - child_transaction = Transaction.continue_from_headers(headers, name="WRONG") - assert child_transaction is not None - assert child_transaction.parent_sampled == parent_sampled - assert child_transaction.trace_id == old_span.trace_id - assert child_transaction.same_process_as_parent is False - assert child_transaction.parent_span_id == old_span.span_id - assert child_transaction.span_id != old_span.span_id - - baggage = child_transaction._baggage - assert baggage - assert not baggage.mutable - assert baggage.sentry_items == { - "public_key": "49d0f7386ad645858ae85020e393bef3", - "trace_id": "771a43a4192642f0b136d5159a501700", - "user_id": "Amelie", - "sample_rate": "0.01337", - } - - # add child transaction to the scope, to show that the captured message will - # be tagged with the trace id (since it happens while the transaction is - # open) - with start_transaction(child_transaction): - # change the transaction name from "WRONG" to make sure the change - # is reflected in the final data - sentry_sdk.get_current_scope().transaction = "ho" - capture_message("hello") - - if parent_sampled is False or (sample_rate == 0 and parent_sampled is None): - # in this case the child transaction won't be captured - trace1, message = envelopes + with continue_trace(headers): + with start_span(name="WRONG") as child_root_span: + assert child_root_span is not None + assert child_root_span.sampled == (sample_rate == 1.0) + if child_root_span.sampled: + assert child_root_span.parent_span_id == old_span.span_id + assert child_root_span.trace_id == old_span.trace_id + assert child_root_span.span_id != old_span.span_id + + baggage = child_root_span.get_baggage() + assert baggage.serialize() == SortedBaggage(headers["baggage"]) + + # change the transaction name from "WRONG" to make sure the change + # is reflected in the final data + sentry_sdk.get_current_scope().set_transaction_name("ho") + # to show 
that the captured message will be tagged with the trace id + # (since it happens while the transaction is open) + capture_message("hello") + + # in this case the child transaction won't be captured + # but message follows twp spec + if sample_rate == 0.0: + (message,) = envelopes message_payload = message.get_event() - trace1_payload = trace1.get_transaction_event() - - assert trace1_payload["transaction"] == "hi" + assert message_payload["transaction"] == "ho" + assert ( + child_root_span.trace_id == message_payload["contexts"]["trace"]["trace_id"] + ) else: trace1, message, trace2 = envelopes trace1_payload = trace1.get_transaction_event() @@ -127,74 +108,42 @@ def test_continue_from_headers( assert ( trace1_payload["contexts"]["trace"]["trace_id"] == trace2_payload["contexts"]["trace"]["trace_id"] - == child_transaction.trace_id + == child_root_span.trace_id == message_payload["contexts"]["trace"]["trace_id"] ) - if parent_sampled is not None: - expected_sample_rate = str(float(parent_sampled)) - else: - expected_sample_rate = str(sample_rate) - assert trace2.headers["trace"] == baggage.dynamic_sampling_context() - assert trace2.headers["trace"] == { - "public_key": "49d0f7386ad645858ae85020e393bef3", - "trace_id": "771a43a4192642f0b136d5159a501700", - "user_id": "Amelie", - "sample_rate": expected_sample_rate, - } assert message_payload["message"] == "hello" -@pytest.mark.parametrize("sample_rate", [0.0, 1.0]) -def test_propagate_traces_deprecation_warning(sentry_init, sample_rate): - sentry_init(traces_sample_rate=sample_rate, propagate_traces=False) - - with start_transaction(name="hi"): - with start_span() as old_span: - with pytest.warns(DeprecationWarning): - dict( - sentry_sdk.get_current_scope().iter_trace_propagation_headers( - old_span - ) - ) - - @pytest.mark.parametrize("sample_rate", [0.5, 1.0]) def test_dynamic_sampling_head_sdk_creates_dsc( - sentry_init, capture_envelopes, sample_rate, monkeypatch + sentry_init, + capture_envelopes, + sample_rate, + monkeypatch, ): sentry_init(traces_sample_rate=sample_rate, release="foo") envelopes = capture_envelopes() # make sure transaction is sampled for both cases with mock.patch("sentry_sdk.tracing_utils.Random.uniform", return_value=0.25): - transaction = Transaction.continue_from_headers({}, name="Head SDK tx") - # will create empty mutable baggage - baggage = transaction._baggage - assert baggage - assert baggage.mutable - assert baggage.sentry_items == {} - assert baggage.third_party_items == "" - - with start_transaction(transaction): - with start_span(op="foo", name="foodesc"): - pass + with continue_trace({}): + with start_span(name="Head SDK tx"): + with start_span(op="foo", name="foodesc") as span: + baggage = span.get_baggage() - # finish will create a new baggage entry - baggage = transaction._baggage - trace_id = transaction.trace_id + trace_id = span.trace_id assert baggage - assert not baggage.mutable assert baggage.third_party_items == "" assert baggage.sentry_items == { "environment": "production", "release": "foo", "sample_rate": str(sample_rate), - "sampled": "true" if transaction.sampled else "false", + "sampled": "true" if span.sampled else "false", "sample_rand": "0.250000", "transaction": "Head SDK tx", "trace_id": trace_id, @@ -208,9 +157,9 @@ def test_dynamic_sampling_head_sdk_creates_dsc( "sentry-transaction=Head%%20SDK%%20tx," "sentry-sample_rate=%s," "sentry-sampled=%s" - % (trace_id, sample_rate, "true" if transaction.sampled else "false") + % (trace_id, sample_rate, "true" if span.sampled else "false") 
) - assert baggage.serialize() == expected_baggage + assert baggage.serialize() == SortedBaggage(expected_baggage) (envelope,) = envelopes assert envelope.headers["trace"] == baggage.dynamic_sampling_context() @@ -218,42 +167,13 @@ def test_dynamic_sampling_head_sdk_creates_dsc( "environment": "production", "release": "foo", "sample_rate": str(sample_rate), + "sampled": "true" if span.sampled else "false", "sample_rand": "0.250000", - "sampled": "true" if transaction.sampled else "false", "transaction": "Head SDK tx", "trace_id": trace_id, } -@pytest.mark.parametrize( - "args,expected_refcount", - [({"traces_sample_rate": 1.0}, 100), ({"traces_sample_rate": 0.0}, 0)], -) -def test_memory_usage(sentry_init, capture_events, args, expected_refcount): - sentry_init(**args) - - references = weakref.WeakSet() - - with start_transaction(name="hi"): - for i in range(100): - with start_span(op="helloworld", name="hi {}".format(i)) as span: - - def foo(): - pass - - references.add(foo) - span.set_tag("foo", foo) - pass - - del foo - del span - - # required only for pypy (cpython frees immediately) - gc.collect() - - assert len(references) == expected_refcount - - def test_transactions_do_not_go_through_before_send(sentry_init, capture_events): def before_send(event, hint): raise RuntimeError("should not be called") @@ -261,7 +181,7 @@ def before_send(event, hint): sentry_init(traces_sample_rate=1.0, before_send=before_send) events = capture_events() - with start_transaction(name="/"): + with start_span(name="/"): pass assert len(events) == 1 @@ -279,7 +199,7 @@ def capture_event(self, event): sentry_init(traces_sample_rate=1, transport=CustomTransport()) events = capture_events() - with start_transaction(name="hi"): + with start_span(name="hi"): with start_span(op="bar", name="bardesc"): pass @@ -289,14 +209,14 @@ def capture_event(self, event): def test_trace_propagation_meta_head_sdk(sentry_init): sentry_init(traces_sample_rate=1.0, release="foo") - transaction = Transaction.continue_from_headers({}, name="Head SDK tx") meta = None span = None - with start_transaction(transaction): - with start_span(op="foo", name="foodesc") as current_span: - span = current_span - meta = sentry_sdk.get_current_scope().trace_propagation_meta() + with continue_trace({}): + with start_span(name="Head SDK tx") as root_span: + with start_span(op="foo", name="foodesc") as current_span: + span = current_span + meta = sentry_sdk.get_current_scope().trace_propagation_meta() ind = meta.find(">") + 1 sentry_trace, baggage = meta[:ind], meta[ind:] @@ -307,7 +227,7 @@ def test_trace_propagation_meta_head_sdk(sentry_init): assert 'meta name="baggage"' in baggage baggage_content = re.findall('content="([^"]*)"', baggage)[0] - assert baggage_content == transaction.get_baggage().serialize() + assert SortedBaggage(baggage_content) == root_span.get_baggage().serialize() @pytest.mark.parametrize( @@ -322,8 +242,8 @@ def test_non_error_exceptions( sentry_init(traces_sample_rate=1.0) events = capture_events() - with start_transaction(name="hi") as transaction: - transaction.set_status(SPANSTATUS.OK) + with start_span(name="hi") as root_span: + root_span.set_status(SPANSTATUS.OK) with pytest.raises(exception_cls): with start_span(op="foo", name="foodesc"): raise exception_cls(exception_value) @@ -333,7 +253,7 @@ def test_non_error_exceptions( span = event["spans"][0] assert "status" not in span.get("tags", {}) - assert "status" not in event["tags"] + assert "status" not in event.get("tags", {}) assert 
event["contexts"]["trace"]["status"] == "ok" @@ -344,8 +264,8 @@ def test_good_sysexit_doesnt_fail_transaction( sentry_init(traces_sample_rate=1.0) events = capture_events() - with start_transaction(name="hi") as transaction: - transaction.set_status(SPANSTATUS.OK) + with start_span(name="hi") as span: + span.set_status(SPANSTATUS.OK) with pytest.raises(SystemExit): with start_span(op="foo", name="foodesc"): if exception_value is not False: @@ -358,5 +278,5 @@ def test_good_sysexit_doesnt_fail_transaction( span = event["spans"][0] assert "status" not in span.get("tags", {}) - assert "status" not in event["tags"] + assert "status" not in event.get("tags", {}) assert event["contexts"]["trace"]["status"] == "ok" diff --git a/tests/tracing/test_misc.py b/tests/tracing/test_misc.py index b954d36e1a..4d85594324 100644 --- a/tests/tracing/test_misc.py +++ b/tests/tracing/test_misc.py @@ -1,14 +1,9 @@ import pytest -import gc -import uuid -import os -from unittest import mock from unittest.mock import MagicMock import sentry_sdk -from sentry_sdk import start_span, start_transaction, set_measurement +from sentry_sdk import start_span, get_current_scope from sentry_sdk.consts import MATCH_ALL -from sentry_sdk.tracing import Span, Transaction from sentry_sdk.tracing_utils import should_propagate_trace from sentry_sdk.utils import Dsn from tests.conftest import ApproxDict @@ -18,9 +13,9 @@ def test_span_trimming(sentry_init, capture_events): sentry_init(traces_sample_rate=1.0, _experiments={"max_spans": 3}) events = capture_events() - with start_transaction(name="hi"): + with start_span(name="hi"): for i in range(10): - with start_span(op="foo{}".format(i)): + with start_span(op=f"foo{i}"): pass (event,) = events @@ -34,20 +29,19 @@ def test_span_trimming(sentry_init, capture_events): assert event["_meta"]["spans"][""]["len"] == 10 assert "_dropped_spans" not in event - assert "dropped_spans" not in event def test_span_data_scrubbing_and_trimming(sentry_init, capture_events): sentry_init(traces_sample_rate=1.0, _experiments={"max_spans": 3}) events = capture_events() - with start_transaction(name="hi"): + with start_span(name="hi"): with start_span(op="foo", name="bar") as span: - span.set_data("password", "secret") - span.set_data("datafoo", "databar") + span.set_attribute("password", "secret") + span.set_attribute("datafoo", "databar") for i in range(10): - with start_span(op="foo{}".format(i)): + with start_span(op=f"foo{i}"): pass (event,) = events @@ -65,33 +59,33 @@ def test_transaction_naming(sentry_init, capture_events): events = capture_events() # default name in event if no name is passed - with start_transaction() as transaction: + with start_span(): pass assert len(events) == 1 - assert events[0]["transaction"] == "" + assert events[0]["transaction"] == "" # the name can be set once the transaction's already started - with start_transaction() as transaction: - transaction.name = "name-known-after-transaction-started" + with start_span() as span: + span.name = "name-known-after-transaction-started" assert len(events) == 2 assert events[1]["transaction"] == "name-known-after-transaction-started" # passing in a name works, too - with start_transaction(name="a"): + with start_span(name="a"): pass assert len(events) == 3 assert events[2]["transaction"] == "a" -def test_transaction_data(sentry_init, capture_events): +def test_root_span_data(sentry_init, capture_events): sentry_init(traces_sample_rate=1.0) events = capture_events() - with start_transaction(name="test-transaction"): - span_or_tx = 
sentry_sdk.get_current_span() - span_or_tx.set_data("foo", "bar") + with start_span(name="test-root-span"): + root_span = sentry_sdk.get_current_span() + root_span.set_attribute("foo", "bar") with start_span(op="test-span") as span: - span.set_data("spanfoo", "spanbar") + span.set_attribute("spanfoo", "spanbar") assert len(events) == 1 @@ -110,259 +104,15 @@ def test_transaction_data(sentry_init, capture_events): assert span_data.items() >= {"spanfoo": "spanbar"}.items() -def test_start_transaction(sentry_init): +def test_finds_spans_on_scope(sentry_init): sentry_init(traces_sample_rate=1.0) - # you can have it start a transaction for you - result1 = start_transaction( - name="/interactions/other-dogs/new-dog", op="greeting.sniff" - ) - assert isinstance(result1, Transaction) - assert result1.name == "/interactions/other-dogs/new-dog" - assert result1.op == "greeting.sniff" - - # or you can pass it an already-created transaction - preexisting_transaction = Transaction( - name="/interactions/other-dogs/new-dog", op="greeting.sniff" - ) - result2 = start_transaction(preexisting_transaction) - assert result2 is preexisting_transaction - - -def test_finds_transaction_on_scope(sentry_init): - sentry_init(traces_sample_rate=1.0) - - transaction = start_transaction(name="dogpark") - - scope = sentry_sdk.get_current_scope() - - # See note in Scope class re: getters and setters of the `transaction` - # property. For the moment, assigning to scope.transaction merely sets the - # transaction name, rather than putting the transaction on the scope, so we - # have to assign to _span directly. - scope._span = transaction - - # Reading scope.property, however, does what you'd expect, and returns the - # transaction on the scope. - assert scope.transaction is not None - assert isinstance(scope.transaction, Transaction) - assert scope.transaction.name == "dogpark" - - # If the transaction is also set as the span on the scope, it can be found - # by accessing _span, too. 
- assert scope._span is not None - assert isinstance(scope._span, Transaction) - assert scope._span.name == "dogpark" - - -def test_finds_transaction_when_descendent_span_is_on_scope( - sentry_init, -): - sentry_init(traces_sample_rate=1.0) - - transaction = start_transaction(name="dogpark") - child_span = transaction.start_child(op="sniffing") - - scope = sentry_sdk.get_current_scope() - scope._span = child_span - - # this is the same whether it's the transaction itself or one of its - # decedents directly attached to the scope - assert scope.transaction is not None - assert isinstance(scope.transaction, Transaction) - assert scope.transaction.name == "dogpark" - - # here we see that it is in fact the span on the scope, rather than the - # transaction itself - assert scope._span is not None - assert isinstance(scope._span, Span) - assert scope._span.op == "sniffing" - - -def test_finds_orphan_span_on_scope(sentry_init): - # this is deprecated behavior which may be removed at some point (along with - # the start_span function) - sentry_init(traces_sample_rate=1.0) - - span = start_span(op="sniffing") - - scope = sentry_sdk.get_current_scope() - scope._span = span - - assert scope._span is not None - assert isinstance(scope._span, Span) - assert scope._span.op == "sniffing" - - -def test_finds_non_orphan_span_on_scope(sentry_init): - sentry_init(traces_sample_rate=1.0) - - transaction = start_transaction(name="dogpark") - child_span = transaction.start_child(op="sniffing") - - scope = sentry_sdk.get_current_scope() - scope._span = child_span - - assert scope._span is not None - assert isinstance(scope._span, Span) - assert scope._span.op == "sniffing" - - -def test_circular_references(monkeypatch, sentry_init, request): - # TODO: We discovered while writing this test about transaction/span - # reference cycles that there's actually also a circular reference in - # `serializer.py`, between the functions `_serialize_node` and - # `_serialize_node_impl`, both of which are defined inside of the main - # `serialize` function, and each of which calls the other one. For now, in - # order to avoid having those ref cycles give us a false positive here, we - # can mock out `serialize`. In the long run, though, we should probably fix - # that. (Whenever we do work on fixing it, it may be useful to add - # - # gc.set_debug(gc.DEBUG_LEAK) - # request.addfinalizer(lambda: gc.set_debug(~gc.DEBUG_LEAK)) - # - # immediately after the initial collection below, so we can see what new - # objects the garbage collector has to clean up once `transaction.finish` is - # called and the serializer runs.) - monkeypatch.setattr( - sentry_sdk.client, - "serialize", - mock.Mock( - return_value=None, - ), - ) - - # In certain versions of python, in some environments (specifically, python - # 3.4 when run in GH Actions), we run into a `ctypes` bug which creates - # circular references when `uuid4()` is called, as happens when we're - # generating event ids. Mocking it with an implementation which doesn't use - # the `ctypes` function lets us avoid having false positives when garbage - # collecting. See https://bugs.python.org/issue20519. 
- monkeypatch.setattr( - uuid, - "uuid4", - mock.Mock( - return_value=uuid.UUID(bytes=os.urandom(16)), - ), - ) - - gc.disable() - request.addfinalizer(gc.enable) - - sentry_init(traces_sample_rate=1.0) - - # Make sure that we're starting with a clean slate before we start creating - # transaction/span reference cycles - gc.collect() - - dogpark_transaction = start_transaction(name="dogpark") - sniffing_span = dogpark_transaction.start_child(op="sniffing") - wagging_span = dogpark_transaction.start_child(op="wagging") - - # At some point, you have to stop sniffing - there are balls to chase! - so finish - # this span while the dogpark transaction is still open - sniffing_span.finish() - - # The wagging, however, continues long past the dogpark, so that span will - # NOT finish before the transaction ends. (Doing it in this order proves - # that both finished and unfinished spans get their cycles broken.) - dogpark_transaction.finish() - - # Eventually you gotta sleep... - wagging_span.finish() - - # assuming there are no cycles by this point, these should all be able to go - # out of scope and get their memory deallocated without the garbage - # collector having anything to do - del sniffing_span - del wagging_span - del dogpark_transaction - - assert gc.collect() == 0 - - -def test_set_meaurement(sentry_init, capture_events): - sentry_init(traces_sample_rate=1.0) - - events = capture_events() - - transaction = start_transaction(name="measuring stuff") - - with pytest.raises(TypeError): - transaction.set_measurement() - - with pytest.raises(TypeError): - transaction.set_measurement("metric.foo") + with start_span(name="dogpark") as root_span: + assert get_current_scope().span == root_span - transaction.set_measurement("metric.foo", 123) - transaction.set_measurement("metric.bar", 456, unit="second") - transaction.set_measurement("metric.baz", 420.69, unit="custom") - transaction.set_measurement("metric.foobar", 12, unit="percent") - transaction.set_measurement("metric.foobar", 17.99, unit="percent") - - transaction.finish() - - (event,) = events - assert event["measurements"]["metric.foo"] == {"value": 123, "unit": ""} - assert event["measurements"]["metric.bar"] == {"value": 456, "unit": "second"} - assert event["measurements"]["metric.baz"] == {"value": 420.69, "unit": "custom"} - assert event["measurements"]["metric.foobar"] == {"value": 17.99, "unit": "percent"} - - -def test_set_meaurement_public_api(sentry_init, capture_events): - sentry_init(traces_sample_rate=1.0) - - events = capture_events() - - with start_transaction(name="measuring stuff"): - set_measurement("metric.foo", 123) - set_measurement("metric.bar", 456, unit="second") - - (event,) = events - assert event["measurements"]["metric.foo"] == {"value": 123, "unit": ""} - assert event["measurements"]["metric.bar"] == {"value": 456, "unit": "second"} - - -def test_set_measurement_deprecated(sentry_init): - sentry_init(traces_sample_rate=1.0) - - with start_transaction(name="measuring stuff") as trx: - with pytest.warns(DeprecationWarning): - set_measurement("metric.foo", 123) - - with pytest.warns(DeprecationWarning): - trx.set_measurement("metric.bar", 456) - - with start_span(op="measuring span") as span: - with pytest.warns(DeprecationWarning): - span.set_measurement("metric.baz", 420.69, unit="custom") - - -def test_set_meaurement_compared_to_set_data(sentry_init, capture_events): - """ - This is just a test to see the difference - between measurements and data in the resulting event payload. 
- """ - sentry_init(traces_sample_rate=1.0) - - events = capture_events() - - with start_transaction(name="measuring stuff") as transaction: - transaction.set_measurement("metric.foo", 123) - transaction.set_data("metric.bar", 456) - - with start_span(op="measuring span") as span: - span.set_measurement("metric.baz", 420.69, unit="custom") - span.set_data("metric.qux", 789) - - (event,) = events - assert event["measurements"]["metric.foo"] == {"value": 123, "unit": ""} - assert event["contexts"]["trace"]["data"]["metric.bar"] == 456 - assert event["spans"][0]["measurements"]["metric.baz"] == { - "value": 420.69, - "unit": "custom", - } - assert event["spans"][0]["data"]["metric.qux"] == 789 + with start_span(name="child") as child_span: + assert get_current_scope().span == child_span + assert child_span.root_span == root_span @pytest.mark.parametrize( @@ -444,68 +194,3 @@ def test_should_propagate_trace_to_sentry( client.transport.parsed_dsn = Dsn(dsn) assert should_propagate_trace(client, url) == expected_propagation_decision - - -def test_start_transaction_updates_scope_name_source(sentry_init): - sentry_init(traces_sample_rate=1.0) - - scope = sentry_sdk.get_current_scope() - - with start_transaction(name="foobar", source="route"): - assert scope._transaction == "foobar" - assert scope._transaction_info == {"source": "route"} - - -@pytest.mark.parametrize("sampled", (True, None)) -def test_transaction_dropped_debug_not_started(sentry_init, sampled): - sentry_init(enable_tracing=True) - - tx = Transaction(sampled=sampled) - - with mock.patch("sentry_sdk.tracing.logger") as mock_logger: - with tx: - pass - - mock_logger.debug.assert_any_call( - "Discarding transaction because it was not started with sentry_sdk.start_transaction" - ) - - with pytest.raises(AssertionError): - # We should NOT see the "sampled = False" message here - mock_logger.debug.assert_any_call( - "Discarding transaction because sampled = False" - ) - - -def test_transaction_dropeed_sampled_false(sentry_init): - sentry_init(enable_tracing=True) - - tx = Transaction(sampled=False) - - with mock.patch("sentry_sdk.tracing.logger") as mock_logger: - with sentry_sdk.start_transaction(tx): - pass - - mock_logger.debug.assert_any_call("Discarding transaction because sampled = False") - - with pytest.raises(AssertionError): - # We should not see the "not started" message here - mock_logger.debug.assert_any_call( - "Discarding transaction because it was not started with sentry_sdk.start_transaction" - ) - - -def test_transaction_not_started_warning(sentry_init): - sentry_init(enable_tracing=True) - - tx = Transaction() - - with mock.patch("sentry_sdk.tracing.logger") as mock_logger: - with tx: - pass - - mock_logger.debug.assert_any_call( - "Transaction was entered without being started with sentry_sdk.start_transaction." - "The transaction will not be sent to Sentry. To fix, start the transaction by" - "passing it to sentry_sdk.start_transaction." - ) diff --git a/tests/tracing/test_noop_span.py b/tests/tracing/test_noop_span.py deleted file mode 100644 index 36778cd485..0000000000 --- a/tests/tracing/test_noop_span.py +++ /dev/null @@ -1,52 +0,0 @@ -import sentry_sdk -from sentry_sdk.tracing import NoOpSpan - -# These tests make sure that the examples from the documentation [1] -# are working when OTel (OpenTelemetry) instrumentation is turned on, -# and therefore, the Sentry tracing should not do anything. 
-# -# 1: https://docs.sentry.io/platforms/python/performance/instrumentation/custom-instrumentation/ - - -def test_noop_start_transaction(sentry_init): - sentry_init(instrumenter="otel") - - with sentry_sdk.start_transaction( - op="task", name="test_transaction_name" - ) as transaction: - assert isinstance(transaction, NoOpSpan) - assert sentry_sdk.get_current_scope().span is transaction - - transaction.name = "new name" - - -def test_noop_start_span(sentry_init): - sentry_init(instrumenter="otel") - - with sentry_sdk.start_span(op="http", name="GET /") as span: - assert isinstance(span, NoOpSpan) - assert sentry_sdk.get_current_scope().span is span - - span.set_tag("http.response.status_code", 418) - span.set_data("http.entity_type", "teapot") - - -def test_noop_transaction_start_child(sentry_init): - sentry_init(instrumenter="otel") - - transaction = sentry_sdk.start_transaction(name="task") - assert isinstance(transaction, NoOpSpan) - - with transaction.start_child(op="child_task") as child: - assert isinstance(child, NoOpSpan) - assert sentry_sdk.get_current_scope().span is child - - -def test_noop_span_start_child(sentry_init): - sentry_init(instrumenter="otel") - span = sentry_sdk.start_span(name="task") - assert isinstance(span, NoOpSpan) - - with span.start_child(op="child_task") as child: - assert isinstance(child, NoOpSpan) - assert sentry_sdk.get_current_scope().span is child diff --git a/tests/tracing/test_propagation.py b/tests/tracing/test_propagation.py deleted file mode 100644 index 730bf2672b..0000000000 --- a/tests/tracing/test_propagation.py +++ /dev/null @@ -1,40 +0,0 @@ -import sentry_sdk -import pytest - - -def test_standalone_span_iter_headers(sentry_init): - sentry_init(enable_tracing=True) - - with sentry_sdk.start_span(op="test") as span: - with pytest.raises(StopIteration): - # We should not have any propagation headers - next(span.iter_headers()) - - -def test_span_in_span_iter_headers(sentry_init): - sentry_init(enable_tracing=True) - - with sentry_sdk.start_span(op="test"): - with sentry_sdk.start_span(op="test2") as span_inner: - with pytest.raises(StopIteration): - # We should not have any propagation headers - next(span_inner.iter_headers()) - - -def test_span_in_transaction(sentry_init): - sentry_init(enable_tracing=True) - - with sentry_sdk.start_transaction(op="test"): - with sentry_sdk.start_span(op="test2") as span: - # Ensure the headers are there - next(span.iter_headers()) - - -def test_span_in_span_in_transaction(sentry_init): - sentry_init(enable_tracing=True) - - with sentry_sdk.start_transaction(op="test"): - with sentry_sdk.start_span(op="test2"): - with sentry_sdk.start_span(op="test3") as span_inner: - # Ensure the headers are there - next(span_inner.iter_headers()) diff --git a/tests/tracing/test_sample_rand.py b/tests/tracing/test_sample_rand.py index f9c10aa04e..fe9f61716d 100644 --- a/tests/tracing/test_sample_rand.py +++ b/tests/tracing/test_sample_rand.py @@ -5,7 +5,7 @@ import pytest import sentry_sdk -from sentry_sdk.tracing_utils import Baggage +from sentry_sdk.tracing import BAGGAGE_HEADER_NAME, SENTRY_TRACE_HEADER_NAME @pytest.mark.parametrize("sample_rand", (0.0, 0.25, 0.5, 0.75)) @@ -22,9 +22,9 @@ def test_deterministic_sampled(sentry_init, capture_events, sample_rate, sample_ with mock.patch( "sentry_sdk.tracing_utils.Random.uniform", return_value=sample_rand ): - with sentry_sdk.start_transaction() as transaction: + with sentry_sdk.start_span() as root_span: assert ( - transaction.get_baggage().sentry_items["sample_rand"] + 
root_span.get_baggage().sentry_items["sample_rand"] == f"{sample_rand:.6f}" # noqa: E231 ) @@ -41,16 +41,20 @@ def test_transaction_uses_incoming_sample_rand( """ Test that the transaction uses the sample_rand value from the incoming baggage. """ - baggage = Baggage(sentry_items={"sample_rand": f"{sample_rand:.6f}"}) # noqa: E231 - sentry_init(traces_sample_rate=sample_rate) events = capture_events() - with sentry_sdk.start_transaction(baggage=baggage) as transaction: - assert ( - transaction.get_baggage().sentry_items["sample_rand"] - == f"{sample_rand:.6f}" # noqa: E231 - ) + baggage = f"sentry-sample_rand={sample_rand:.6f},sentry-trace_id=771a43a4192642f0b136d5159a501700" # noqa: E231 + sentry_trace = "771a43a4192642f0b136d5159a501700-1234567890abcdef" + + with sentry_sdk.continue_trace( + {BAGGAGE_HEADER_NAME: baggage, SENTRY_TRACE_HEADER_NAME: sentry_trace} + ): + with sentry_sdk.start_span() as root_span: + assert ( + root_span.get_baggage().sentry_items["sample_rand"] + == f"{sample_rand:.6f}" # noqa: E231 + ) # Transaction event captured if sample_rand < sample_rate, indicating that # sample_rand is used to make the sampling decision. @@ -77,13 +81,95 @@ def test_decimal_context(sentry_init, capture_events): with mock.patch( "sentry_sdk.tracing_utils.Random.uniform", return_value=0.123456789 ): - with sentry_sdk.start_transaction() as transaction: - assert ( - transaction.get_baggage().sentry_items["sample_rand"] == "0.123456" - ) + with sentry_sdk.start_span() as root_span: + assert root_span.get_baggage().sentry_items["sample_rand"] == "0.123456" finally: decimal.getcontext().prec = old_prec decimal.getcontext().traps[Inexact] = old_inexact decimal.getcontext().traps[FloatOperation] = old_float_operation assert len(events) == 1 + + +@pytest.mark.parametrize( + "incoming_sample_rand,expected_sample_rand", + ( + ("0.0100015", "0.0100015"), + ("0.1", "0.1"), + ), +) +def test_unexpected_incoming_sample_rand_precision( + sentry_init, capture_events, incoming_sample_rand, expected_sample_rand +): + """ + Test that incoming sample_rand is correctly interpreted even if it looks unexpected. + + We shouldn't be getting arbitrary precision sample_rand in incoming headers, + but if we do for some reason, check that we don't tamper with it. 
+ """ + sentry_init(traces_sample_rate=1.0) + events = capture_events() + + baggage = f"sentry-sample_rand={incoming_sample_rand},sentry-trace_id=771a43a4192642f0b136d5159a501700" # noqa: E231 + sentry_trace = "771a43a4192642f0b136d5159a501700-1234567890abcdef" + + with sentry_sdk.continue_trace( + {BAGGAGE_HEADER_NAME: baggage, SENTRY_TRACE_HEADER_NAME: sentry_trace} + ): + with sentry_sdk.start_span() as root_span: + assert ( + root_span.get_baggage().sentry_items["sample_rand"] + == expected_sample_rand + ) + + assert len(events) == 1 + + +@pytest.mark.parametrize( + "incoming_sample_rand", + ("abc", "null", "47"), +) +def test_invalid_incoming_sample_rand(sentry_init, incoming_sample_rand): + """Test that we handle malformed incoming sample_rand.""" + sentry_init(traces_sample_rate=1.0) + + baggage = f"sentry-sample_rand={incoming_sample_rand},sentry-trace_id=771a43a4192642f0b136d5159a501700" # noqa: E231 + sentry_trace = "771a43a4192642f0b136d5159a501700-1234567890abcdef" + + with sentry_sdk.continue_trace( + {BAGGAGE_HEADER_NAME: baggage, SENTRY_TRACE_HEADER_NAME: sentry_trace} + ): + with sentry_sdk.start_span(): + pass + + # The behavior here is undefined since we got a broken incoming trace, + # so as long as the SDK doesn't produce an error we consider this + # testcase a success. + + +@pytest.mark.parametrize("incoming", ((0.0, "true"), (1.0, "false"))) +def test_invalid_incoming_sampled_and_sample_rate(sentry_init, incoming): + """ + Test that we don't error out in case we can't generate a sample_rand that + would respect the incoming sampled and sample_rate. + """ + sentry_init(traces_sample_rate=1.0) + + sample_rate, sampled = incoming + + baggage = ( + f"sentry-sample_rate={sample_rate}," # noqa: E231 + f"sentry-sampled={sampled}," # noqa: E231 + "sentry-trace_id=771a43a4192642f0b136d5159a501700" + ) + sentry_trace = f"771a43a4192642f0b136d5159a501700-1234567890abcdef-{1 if sampled == 'true' else 0}" + + with sentry_sdk.continue_trace( + {BAGGAGE_HEADER_NAME: baggage, SENTRY_TRACE_HEADER_NAME: sentry_trace} + ): + with sentry_sdk.start_span(): + pass + + # The behavior here is undefined since we got a broken incoming trace, + # so as long as the SDK doesn't produce an error we consider this + # testcase a success. diff --git a/tests/tracing/test_sample_rand_propagation.py b/tests/tracing/test_sample_rand_propagation.py index ea3ea548ff..17bf7a6168 100644 --- a/tests/tracing/test_sample_rand_propagation.py +++ b/tests/tracing/test_sample_rand_propagation.py @@ -7,37 +7,38 @@ """ from unittest import mock -from unittest.mock import Mock import sentry_sdk -def test_continue_trace_with_sample_rand(): +def test_continue_trace_with_sample_rand(sentry_init): """ Test that an incoming sample_rand is propagated onto the transaction's baggage. """ + sentry_init() + headers = { - "sentry-trace": "00000000000000000000000000000000-0000000000000000-0", + "sentry-trace": "771a43a4192642f0b136d5159a501700-1234567890abcdef-0", "baggage": "sentry-sample_rand=0.1,sentry-sample_rate=0.5", } - transaction = sentry_sdk.continue_trace(headers) - assert transaction.get_baggage().sentry_items["sample_rand"] == "0.1" + with sentry_sdk.continue_trace(headers): + with sentry_sdk.start_span(name="root-span") as root_span: + assert root_span.get_baggage().sentry_items["sample_rand"] == "0.1" -def test_continue_trace_missing_sample_rand(): +def test_continue_trace_missing_sample_rand(sentry_init): """ Test that a missing sample_rand is filled in onto the transaction's baggage. 
""" + sentry_init() headers = { - "sentry-trace": "00000000000000000000000000000000-0000000000000000", + "sentry-trace": "771a43a4192642f0b136d5159a501700-1234567890abcdef", "baggage": "sentry-placeholder=asdf", } - mock_uniform = Mock(return_value=0.5) - - with mock.patch("sentry_sdk.tracing_utils.Random.uniform", mock_uniform): - transaction = sentry_sdk.continue_trace(headers) - - assert transaction.get_baggage().sentry_items["sample_rand"] == "0.500000" + with mock.patch("sentry_sdk.tracing_utils.Random.uniform", return_value=0.5): + with sentry_sdk.continue_trace(headers): + with sentry_sdk.start_span(name="root-span") as root_span: + assert root_span.get_baggage().sentry_items["sample_rand"] == "0.500000" diff --git a/tests/tracing/test_sampling.py b/tests/tracing/test_sampling.py index 1761a3dbac..bfd845d26d 100644 --- a/tests/tracing/test_sampling.py +++ b/tests/tracing/test_sampling.py @@ -5,36 +5,35 @@ import pytest import sentry_sdk -from sentry_sdk import start_span, start_transaction, capture_exception -from sentry_sdk.tracing import Transaction -from sentry_sdk.tracing_utils import Baggage +from sentry_sdk import start_span, capture_exception +from sentry_sdk.tracing import BAGGAGE_HEADER_NAME, SENTRY_TRACE_HEADER_NAME from sentry_sdk.utils import logger -def test_sampling_decided_only_for_transactions(sentry_init, capture_events): +def test_sampling_decided_only_for_root_spans(sentry_init): sentry_init(traces_sample_rate=0.5) - with start_transaction(name="hi") as transaction: - assert transaction.sampled is not None + with start_span(name="outer1") as root_span1: + assert root_span1.sampled is not None - with start_span() as span: - assert span.sampled == transaction.sampled + with start_span(name="inner") as span: + assert span.sampled == root_span1.sampled - with start_span() as span: - assert span.sampled is None + with start_span(name="outer2") as root_span2: + assert root_span2.sampled is not None @pytest.mark.parametrize("sampled", [True, False]) -def test_nested_transaction_sampling_override(sentry_init, sampled): +def test_nested_span_sampling_override(sentry_init, sampled): sentry_init(traces_sample_rate=1.0) - with start_transaction(name="outer", sampled=sampled) as outer_transaction: - assert outer_transaction.sampled is sampled - with start_transaction( - name="inner", sampled=(not sampled) - ) as inner_transaction: - assert inner_transaction.sampled is not sampled - assert outer_transaction.sampled is sampled + with start_span(name="outer", sampled=sampled) as outer_span: + assert outer_span.sampled is sampled + with start_span(name="inner", sampled=(not sampled)) as inner_span: + # won't work because the child span inherits the sampling decision + # from the parent + assert inner_span.sampled is sampled + assert outer_span.sampled is sampled def test_no_double_sampling(sentry_init, capture_events): @@ -43,26 +42,12 @@ def test_no_double_sampling(sentry_init, capture_events): sentry_init(traces_sample_rate=1.0, sample_rate=0.0) events = capture_events() - with start_transaction(name="/"): + with start_span(name="/"): pass assert len(events) == 1 -@pytest.mark.parametrize("sampling_decision", [True, False]) -def test_get_transaction_and_span_from_scope_regardless_of_sampling_decision( - sentry_init, sampling_decision -): - sentry_init(traces_sample_rate=1.0) - - with start_transaction(name="/", sampled=sampling_decision): - with start_span(op="child-span"): - with start_span(op="child-child-span"): - scope = sentry_sdk.get_current_scope() - assert scope.span.op 
== "child-child-span" - assert scope.transaction.name == "/" - - @pytest.mark.parametrize( "traces_sample_rate,expected_decision", [(0.0, False), (0.25, False), (0.75, True), (1.00, True)], @@ -74,9 +59,14 @@ def test_uses_traces_sample_rate_correctly( ): sentry_init(traces_sample_rate=traces_sample_rate) - baggage = Baggage(sentry_items={"sample_rand": "0.500000"}) - transaction = start_transaction(name="dogpark", baggage=baggage) - assert transaction.sampled is expected_decision + with sentry_sdk.continue_trace( + { + BAGGAGE_HEADER_NAME: "sentry-sample_rand=0.500000,sentry-trace_id=397f36434d07b20135324b2e6ae70c77", + SENTRY_TRACE_HEADER_NAME: "397f36434d07b20135324b2e6ae70c77-1234567890abcdef", + } + ): + with start_span(name="dogpark") as root_span: + assert root_span.sampled is expected_decision @pytest.mark.parametrize( @@ -90,9 +80,14 @@ def test_uses_traces_sampler_return_value_correctly( ): sentry_init(traces_sampler=mock.Mock(return_value=traces_sampler_return_value)) - baggage = Baggage(sentry_items={"sample_rand": "0.500000"}) - transaction = start_transaction(name="dogpark", baggage=baggage) - assert transaction.sampled is expected_decision + with sentry_sdk.continue_trace( + { + BAGGAGE_HEADER_NAME: "sentry-sample_rand=0.500000,sentry-trace_id=397f36434d07b20135324b2e6ae70c77", + SENTRY_TRACE_HEADER_NAME: "397f36434d07b20135324b2e6ae70c77-1234567890abcdef", + } + ): + with start_span(name="dogpark") as root_span: + assert root_span.sampled is expected_decision @pytest.mark.parametrize("traces_sampler_return_value", [True, False]) @@ -101,8 +96,8 @@ def test_tolerates_traces_sampler_returning_a_boolean( ): sentry_init(traces_sampler=mock.Mock(return_value=traces_sampler_return_value)) - transaction = start_transaction(name="dogpark") - assert transaction.sampled is traces_sampler_return_value + with start_span(name="dogpark") as span: + assert span.sampled is traces_sampler_return_value @pytest.mark.parametrize("sampling_decision", [True, False]) @@ -112,8 +107,8 @@ def test_only_captures_transaction_when_sampled_is_true( sentry_init(traces_sampler=mock.Mock(return_value=sampling_decision)) events = capture_events() - transaction = start_transaction(name="dogpark") - transaction.finish() + with start_span(name="dogpark"): + pass assert len(events) == (1 if sampling_decision else 0) @@ -134,9 +129,9 @@ def test_prefers_traces_sampler_to_traces_sample_rate( traces_sampler=traces_sampler, ) - transaction = start_transaction(name="dogpark") - assert traces_sampler.called is True - assert transaction.sampled is traces_sampler_return_value + with start_span(name="dogpark") as span: + assert traces_sampler.called is True + assert span.sampled is traces_sampler_return_value @pytest.mark.parametrize("parent_sampling_decision", [True, False]) @@ -148,10 +143,17 @@ def test_ignores_inherited_sample_decision_when_traces_sampler_defined( traces_sampler = mock.Mock(return_value=not parent_sampling_decision) sentry_init(traces_sampler=traces_sampler) - transaction = start_transaction( - name="dogpark", parent_sampled=parent_sampling_decision + sentry_trace_header = ( + "12312012123120121231201212312012-1121201211212012-{sampled}".format( + sampled=int(parent_sampling_decision) + ) ) - assert transaction.sampled is not parent_sampling_decision + + with sentry_sdk.continue_trace({"sentry-trace": sentry_trace_header}): + with sentry_sdk.start_span(name="dogpark") as span: + pass + + assert span.sampled is not parent_sampling_decision @pytest.mark.parametrize("explicit_decision", [True, False]) 
@@ -163,8 +165,8 @@ def test_traces_sampler_doesnt_overwrite_explicitly_passed_sampling_decision( traces_sampler = mock.Mock(return_value=not explicit_decision) sentry_init(traces_sampler=traces_sampler) - transaction = start_transaction(name="dogpark", sampled=explicit_decision) - assert transaction.sampled is explicit_decision + with start_span(name="dogpark", sampled=explicit_decision) as span: + assert span.sampled is explicit_decision @pytest.mark.parametrize("parent_sampling_decision", [True, False]) @@ -177,18 +179,26 @@ def test_inherits_parent_sampling_decision_when_traces_sampler_undefined( sentry_init(traces_sample_rate=0.5) mock_random_value = 0.25 if parent_sampling_decision is False else 0.75 - with mock.patch.object(random, "random", return_value=mock_random_value): - transaction = start_transaction( - name="dogpark", parent_sampled=parent_sampling_decision + sentry_trace_header = ( + "12312012123120121231201212312012-1121201211212012-{sampled}".format( + sampled=int(parent_sampling_decision) ) - assert transaction.sampled is parent_sampling_decision + ) + with mock.patch.object(random, "random", return_value=mock_random_value): + with sentry_sdk.continue_trace({"sentry-trace": sentry_trace_header}): + with start_span(name="dogpark") as span: + assert span.sampled is parent_sampling_decision @pytest.mark.parametrize("parent_sampling_decision", [True, False]) def test_passes_parent_sampling_decision_in_sampling_context( sentry_init, parent_sampling_decision ): - sentry_init(traces_sample_rate=1.0) + def dummy_traces_sampler(sampling_context): + assert sampling_context["parent_sampled"] is parent_sampling_decision + return 1.0 + + sentry_init(traces_sample_rate=1.0, traces_sampler=dummy_traces_sampler) sentry_trace_header = ( "12312012123120121231201212312012-1121201211212012-{sampled}".format( @@ -196,32 +206,21 @@ def test_passes_parent_sampling_decision_in_sampling_context( ) ) - transaction = Transaction.continue_from_headers( - headers={"sentry-trace": sentry_trace_header}, name="dogpark" - ) - - def mock_set_initial_sampling_decision(_, sampling_context): - assert "parent_sampled" in sampling_context - assert sampling_context["parent_sampled"] is parent_sampling_decision + with sentry_sdk.continue_trace({"sentry-trace": sentry_trace_header}): + with sentry_sdk.start_span(name="dogpark"): + pass - with mock.patch( - "sentry_sdk.tracing.Transaction._set_initial_sampling_decision", - mock_set_initial_sampling_decision, - ): - start_transaction(transaction=transaction) - -def test_passes_custom_sampling_context_from_start_transaction_to_traces_sampler( +def test_passes_attributes_from_start_span_to_traces_sampler( sentry_init, DictionaryContaining # noqa: N803 ): traces_sampler = mock.Mock() sentry_init(traces_sampler=traces_sampler) - start_transaction(custom_sampling_context={"dogs": "yes", "cats": "maybe"}) - - traces_sampler.assert_any_call( - DictionaryContaining({"dogs": "yes", "cats": "maybe"}) - ) + with start_span(attributes={"dogs": "yes", "cats": "maybe"}): + traces_sampler.assert_any_call( + DictionaryContaining({"dogs": "yes", "cats": "maybe"}) + ) def test_sample_rate_affects_errors(sentry_init, capture_events): @@ -256,9 +255,11 @@ def test_warns_and_sets_sampled_to_false_on_invalid_traces_sampler_return_value( sentry_init(traces_sampler=mock.Mock(return_value=traces_sampler_return_value)) with mock.patch.object(logger, "warning", mock.Mock()): - transaction = start_transaction(name="dogpark") - logger.warning.assert_any_call(StringContaining("Given sample 
rate is invalid")) - assert transaction.sampled is False + with start_span(name="dogpark") as span: + logger.warning.assert_any_call( + StringContaining("Given sample rate is invalid") + ) + assert span.sampled is False @pytest.mark.parametrize( @@ -283,9 +284,8 @@ def test_records_lost_event_only_if_traces_sample_rate_enabled( sentry_init(traces_sample_rate=traces_sample_rate) record_lost_event_calls = capture_record_lost_event_calls() - transaction = start_transaction(name="dogpark") - assert transaction.sampled is sampled_output - transaction.finish() + with start_span(name="dogpark") as span: + assert span.sampled is sampled_output # Use Counter because order of calls does not matter assert Counter(record_lost_event_calls) == Counter(expected_record_lost_event_calls) @@ -310,12 +310,35 @@ def test_records_lost_event_only_if_traces_sampler_enabled( sampled_output, expected_record_lost_event_calls, ): - sentry_init(traces_sampler=traces_sampler) + sentry_init( + traces_sample_rate=None, + traces_sampler=traces_sampler, + ) record_lost_event_calls = capture_record_lost_event_calls() - transaction = start_transaction(name="dogpark") - assert transaction.sampled is sampled_output - transaction.finish() + with start_span(name="dogpark") as span: + assert span.sampled is sampled_output # Use Counter because order of calls does not matter assert Counter(record_lost_event_calls) == Counter(expected_record_lost_event_calls) + + +@pytest.mark.parametrize("parent_sampling_decision", [True, False]) +def test_profiles_sampler_gets_sampling_context(sentry_init, parent_sampling_decision): + def dummy_profiles_sampler(sampling_context): + assert sampling_context["transaction_context"] == { + "name": "dogpark", + "op": "op", + "source": "custom", + } + assert sampling_context["parent_sampled"] == parent_sampling_decision + return 1.0 + + sentry_init(traces_sample_rate=1.0, profiles_sampler=dummy_profiles_sampler) + + sentry_trace = "12312012123120121231201212312012-1121201211212012-{}".format( + int(parent_sampling_decision) + ) + with sentry_sdk.continue_trace({"sentry-trace": sentry_trace}): + with sentry_sdk.start_span(name="dogpark", op="op"): + pass diff --git a/tests/tracing/test_span_name.py b/tests/tracing/test_span_name.py index 9c1768990a..d7d3772727 100644 --- a/tests/tracing/test_span_name.py +++ b/tests/tracing/test_span_name.py @@ -1,27 +1,11 @@ -import pytest - import sentry_sdk -def test_start_span_description(sentry_init, capture_events): - sentry_init(traces_sample_rate=1.0) - events = capture_events() - - with sentry_sdk.start_transaction(name="hi"): - with pytest.deprecated_call(): - with sentry_sdk.start_span(op="foo", description="span-desc"): - ... - - (event,) = events - - assert event["spans"][0]["description"] == "span-desc" - - def test_start_span_name(sentry_init, capture_events): sentry_init(traces_sample_rate=1.0) events = capture_events() - with sentry_sdk.start_transaction(name="hi"): + with sentry_sdk.start_span(name="hi"): with sentry_sdk.start_span(op="foo", name="span-name"): ... @@ -30,26 +14,11 @@ def test_start_span_name(sentry_init, capture_events): assert event["spans"][0]["description"] == "span-name" -def test_start_child_description(sentry_init, capture_events): - sentry_init(traces_sample_rate=1.0) - events = capture_events() - - with sentry_sdk.start_transaction(name="hi"): - with pytest.deprecated_call(): - with sentry_sdk.start_span(op="foo", description="span-desc") as span: - with span.start_child(op="bar", description="child-desc"): - ... 
- - (event,) = events - - assert event["spans"][-1]["description"] == "child-desc" - - def test_start_child_name(sentry_init, capture_events): sentry_init(traces_sample_rate=1.0) events = capture_events() - with sentry_sdk.start_transaction(name="hi"): + with sentry_sdk.start_span(name="hi"): with sentry_sdk.start_span(op="foo", name="span-name") as span: with span.start_child(op="bar", name="child-name"): ... diff --git a/tests/tracing/test_span_origin.py b/tests/tracing/test_span_origin.py index 16635871b3..649f704b1b 100644 --- a/tests/tracing/test_span_origin.py +++ b/tests/tracing/test_span_origin.py @@ -1,11 +1,11 @@ -from sentry_sdk import start_transaction, start_span +from sentry_sdk import start_span def test_span_origin_manual(sentry_init, capture_events): sentry_init(traces_sample_rate=1.0) events = capture_events() - with start_transaction(name="hi"): + with start_span(name="hi"): with start_span(op="foo", name="bar"): pass @@ -20,11 +20,11 @@ def test_span_origin_custom(sentry_init, capture_events): sentry_init(traces_sample_rate=1.0) events = capture_events() - with start_transaction(name="hi"): + with start_span(name="hi"): with start_span(op="foo", name="bar", origin="foo.foo2.foo3"): pass - with start_transaction(name="ho", origin="ho.ho2.ho3"): + with start_span(name="ho", origin="ho.ho2.ho3"): with start_span(op="baz", name="qux", origin="baz.baz2.baz3"): pass diff --git a/tests/tracing/test_trace_propagation.py b/tests/tracing/test_trace_propagation.py new file mode 100644 index 0000000000..358e3f48aa --- /dev/null +++ b/tests/tracing/test_trace_propagation.py @@ -0,0 +1,290 @@ +import pytest +import requests +import sentry_sdk +from http.client import HTTPConnection + +USE_DEFAULT_TRACES_SAMPLE_RATE = -1 + +INCOMING_TRACE_ID = "771a43a4192642f0b136d5159a501700" +INCOMING_HEADERS = { + "sentry-trace": f"{INCOMING_TRACE_ID}-1234567890abcdef", + "baggage": ( + f"sentry-trace_id={INCOMING_TRACE_ID}, " + "sentry-public_key=frontendpublickey," + "sentry-sample_rate=0.01337," + "sentry-release=myfrontend," + "sentry-environment=bird," + "sentry-transaction=bar" + ), +} + + +# +# Proper high level testing for trace propagation. +# Testing the matrix of test cases described here: +# https://develop.sentry.dev/sdk/telemetry/traces/trace-propagation-cheat-sheet/ +# + + +@pytest.fixture +def _mock_putheader(monkeypatch): + """ + Mock HTTPConnection.putheader to capture calls to it. 
+ """ + putheader_calls = [] + original_putheader = HTTPConnection.putheader + + def mock_putheader_fn(self, header, value): + putheader_calls.append((header, value)) + return original_putheader(self, header, value) + + monkeypatch.setattr(HTTPConnection, "putheader", mock_putheader_fn) + return putheader_calls + + +@pytest.mark.parametrize( + "traces_sample_rate", + [ + USE_DEFAULT_TRACES_SAMPLE_RATE, + None, + 0, + 1, + ], + ids=[ + "traces_sample_rate=DEFAULT", + "traces_sample_rate=None", + "traces_sample_rate=0", + "traces_sample_rate=1", + ], +) +def test_no_incoming_trace_and_trace_propagation_targets_matching( + sentry_init, capture_events, _mock_putheader, traces_sample_rate +): + init_kwargs = {} + if traces_sample_rate != USE_DEFAULT_TRACES_SAMPLE_RATE: + init_kwargs["traces_sample_rate"] = traces_sample_rate + sentry_init(**init_kwargs) + + events = capture_events() + + NO_INCOMING_HEADERS = {} # noqa: N806 + + with sentry_sdk.continue_trace(NO_INCOMING_HEADERS): + with sentry_sdk.start_span(op="test", name="test"): + requests.get("http://example.com") + + # CHECK if performance data (a transaction/span) is sent to Sentry + if traces_sample_rate == 1: + assert len(events) == 1 + else: + assert len(events) == 0 + + outgoing_request_headers = {key: value for key, value in _mock_putheader} + + # CHECK if trace information is added to the outgoing request + assert "sentry-trace" in outgoing_request_headers + assert "baggage" in outgoing_request_headers + + # CHECK if incoming trace is continued + # (no assert necessary, because there is no incoming trace information) + + +@pytest.mark.parametrize( + "traces_sample_rate", + [ + USE_DEFAULT_TRACES_SAMPLE_RATE, + None, + 0, + 1, + ], + ids=[ + "traces_sample_rate=DEFAULT", + "traces_sample_rate=None", + "traces_sample_rate=0", + "traces_sample_rate=1", + ], +) +def test_no_incoming_trace_and_trace_propagation_targets_not_matching( + sentry_init, capture_events, _mock_putheader, traces_sample_rate +): + init_kwargs = { + "trace_propagation_targets": [ + "http://someothersite.com", + ], + } + if traces_sample_rate != USE_DEFAULT_TRACES_SAMPLE_RATE: + init_kwargs["traces_sample_rate"] = traces_sample_rate + sentry_init(**init_kwargs) + + events = capture_events() + + NO_INCOMING_HEADERS = {} # noqa: N806 + + with sentry_sdk.continue_trace(NO_INCOMING_HEADERS): + with sentry_sdk.start_span(op="test", name="test"): + requests.get("http://example.com") + + # CHECK if performance data (a transaction/span) is sent to Sentry + if traces_sample_rate == 1: + assert len(events) == 1 + else: + assert len(events) == 0 + + outgoing_request_headers = {key: value for key, value in _mock_putheader} + + # CHECK if trace information is added to the outgoing request + assert "sentry-trace" not in outgoing_request_headers + assert "baggage" not in outgoing_request_headers + + # CHECK if incoming trace is continued + # (no assert necessary, because there is no incoming trace information, and no outgoing trace information either) + + +@pytest.mark.parametrize( + "traces_sample_rate", + [ + USE_DEFAULT_TRACES_SAMPLE_RATE, + None, + 0, + 1, + ], + ids=[ + "traces_sample_rate=DEFAULT", + "traces_sample_rate=None", + "traces_sample_rate=0", + "traces_sample_rate=1", + ], +) +@pytest.mark.parametrize( + "incoming_parent_sampled", + ["deferred", "1", "0"], + ids=[ + "incoming_parent_sampled=DEFERRED", + "incoming_parent_sampled=1", + "incoming_parent_sampled=0", + ], +) +def test_with_incoming_trace_and_trace_propagation_targets_matching( + sentry_init, + 
capture_events, + _mock_putheader, + incoming_parent_sampled, + traces_sample_rate, +): + init_kwargs = {} + if traces_sample_rate != USE_DEFAULT_TRACES_SAMPLE_RATE: + init_kwargs["traces_sample_rate"] = traces_sample_rate + sentry_init(**init_kwargs) + + events = capture_events() + + incoming_headers = INCOMING_HEADERS.copy() + if incoming_parent_sampled != "deferred": + incoming_headers["sentry-trace"] += f"-{incoming_parent_sampled}" + incoming_headers[ + "baggage" + ] += f',sentry-sampled={"true" if incoming_parent_sampled == "1" else "false"}' # noqa: E231 + + with sentry_sdk.continue_trace(incoming_headers): + with sentry_sdk.start_span(op="test", name="test"): + requests.get("http://example.com") + + # CHECK if performance data (a transaction/span) is sent to Sentry + if ( + traces_sample_rate is None + or traces_sample_rate == USE_DEFAULT_TRACES_SAMPLE_RATE + or incoming_parent_sampled == "0" + ): + assert len(events) == 0 + else: + if incoming_parent_sampled == "1" or traces_sample_rate == 1: + assert len(events) == 1 + else: + assert len(events) == 0 + + outgoing_request_headers = {key: value for key, value in _mock_putheader} + + # CHECK if trace information is added to the outgoing request + assert "sentry-trace" in outgoing_request_headers + assert "baggage" in outgoing_request_headers + + # CHECK if incoming trace is continued + # Always continue the incoming trace, no matter traces_sample_rate + assert INCOMING_TRACE_ID in outgoing_request_headers["sentry-trace"] + assert INCOMING_TRACE_ID in outgoing_request_headers["baggage"] + + +@pytest.mark.parametrize( + "traces_sample_rate", + [ + USE_DEFAULT_TRACES_SAMPLE_RATE, + None, + 0, + 1, + ], + ids=[ + "traces_sample_rate=DEFAULT", + "traces_sample_rate=None", + "traces_sample_rate=0", + "traces_sample_rate=1", + ], +) +@pytest.mark.parametrize( + "incoming_parent_sampled", + ["deferred", "1", "0"], + ids=[ + "incoming_parent_sampled=DEFERRED", + "incoming_parent_sampled=1", + "incoming_parent_sampled=0", + ], +) +def test_with_incoming_trace_and_trace_propagation_targets_not_matching( + sentry_init, + capture_events, + _mock_putheader, + incoming_parent_sampled, + traces_sample_rate, +): + init_kwargs = { + "trace_propagation_targets": [ + "http://someothersite.com", + ], + } + if traces_sample_rate != USE_DEFAULT_TRACES_SAMPLE_RATE: + init_kwargs["traces_sample_rate"] = traces_sample_rate + sentry_init(**init_kwargs) + + events = capture_events() + + incoming_headers = INCOMING_HEADERS.copy() + if incoming_parent_sampled != "deferred": + incoming_headers["sentry-trace"] += f"-{incoming_parent_sampled}" + incoming_headers[ + "baggage" + ] += f',sentry-sampled={"true" if incoming_parent_sampled == "1" else "false"}' # noqa: E231 + + with sentry_sdk.continue_trace(incoming_headers): + with sentry_sdk.start_span(op="test", name="test"): + requests.get("http://example.com") + + # CHECK if performance data (a transaction/span) is sent to Sentry + if ( + traces_sample_rate is None + or traces_sample_rate == USE_DEFAULT_TRACES_SAMPLE_RATE + or incoming_parent_sampled == "0" + ): + assert len(events) == 0 + else: + if incoming_parent_sampled == "1" or traces_sample_rate == 1: + assert len(events) == 1 + else: + assert len(events) == 0 + + outgoing_request_headers = {key: value for key, value in _mock_putheader} + + # CHECK if trace information is added to the outgoing request + assert "sentry-trace" not in outgoing_request_headers + assert "baggage" not in outgoing_request_headers + + # CHECK if incoming trace is continued + # (no 
assert necessary, because the trace information is not added to the outgoing request (see previous asserts)) diff --git a/tox.ini b/tox.ini index 4c05bcaa75..a3272189a5 100644 --- a/tox.ini +++ b/tox.ini @@ -17,11 +17,8 @@ requires = # This version introduced using pip 24.1 which does not work with older Celery and HTTPX versions. virtualenv<20.26.3 envlist = - # === Common === - {py3.6,py3.7,py3.8,py3.9,py3.10,py3.11,py3.12,py3.13}-common - # === Gevent === - {py3.6,py3.8,py3.10,py3.11,py3.12}-gevent + {py3.8,py3.10,py3.11,py3.12}-gevent # === Integrations === # General format is {pythonversion}-{integrationname}-v{frameworkversion} @@ -55,24 +52,24 @@ envlist = {py3.8,py3.11}-beam-latest # Boto3 - {py3.6,py3.7}-boto3-v{1.12} + {py3.7}-boto3-v{1.12} {py3.7,py3.11,py3.12}-boto3-v{1.23} {py3.11,py3.12}-boto3-v{1.34} {py3.11,py3.12,py3.13}-boto3-latest # Chalice - {py3.6,py3.9}-chalice-v{1.16} + {py3.7,py3.9}-chalice-v{1.16} {py3.8,py3.12,py3.13}-chalice-latest # Cloud Resource Context - {py3.6,py3.12,py3.13}-cloud_resource_context + {py3.7,py3.12,py3.13}-cloud_resource_context # GCP {py3.7}-gcp # HTTPX - {py3.6,py3.9}-httpx-v{0.16,0.18} - {py3.6,py3.10}-httpx-v{0.20,0.22} + {py3.7,py3.9}-httpx-v{0.16,0.18} + {py3.7,py3.10}-httpx-v{0.20,0.22} {py3.7,py3.11,py3.12}-httpx-v{0.23,0.24} {py3.9,py3.11,py3.12}-httpx-v{0.25,0.27} {py3.9,py3.12,py3.13}-httpx-latest @@ -90,14 +87,8 @@ envlist = {py3.9,py3.11,py3.12}-openai-latest {py3.9,py3.11,py3.12}-openai-notiktoken - # OpenTelemetry (OTel) - {py3.7,py3.9,py3.12,py3.13}-opentelemetry - - # OpenTelemetry Experimental (POTel) - {py3.8,py3.9,py3.10,py3.11,py3.12,py3.13}-potel - # pure_eval - {py3.6,py3.12,py3.13}-pure_eval + {py3.7,py3.12,py3.13}-pure_eval # Quart {py3.7,py3.11}-quart-v{0.16} @@ -109,24 +100,23 @@ envlist = {py3.10,py3.11}-ray-latest # Redis - {py3.6,py3.8}-redis-v{3} + {py3.7,py3.8}-redis-v{3} {py3.7,py3.8,py3.11}-redis-v{4} {py3.7,py3.11,py3.12}-redis-v{5} {py3.7,py3.12,py3.13}-redis-latest # Requests - {py3.6,py3.8,py3.12,py3.13}-requests + {py3.7,py3.8,py3.12,py3.13}-requests # RQ (Redis Queue) - {py3.6}-rq-v{0.6} - {py3.6,py3.9}-rq-v{0.13,1.0} - {py3.6,py3.11}-rq-v{1.5,1.10} + {py3.7,py3.9}-rq-v{0.13,1.0} + {py3.7,py3.11}-rq-v{1.5,1.10} {py3.7,py3.11,py3.12}-rq-v{1.15,1.16} {py3.7,py3.12,py3.13}-rq-latest # Sanic - {py3.6,py3.7}-sanic-v{0.8} - {py3.6,py3.8}-sanic-v{20} + {py3.7}-sanic-v{0.8} + {py3.8}-sanic-v{20} {py3.8,py3.11,py3.12}-sanic-v{24.6} {py3.9,py3.12,py3.13}-sanic-latest @@ -134,6 +124,13 @@ envlist = # These come from the populate_tox.py script. Eventually we should move all # integration tests there. 
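Editor's note (not part of the diff): before the tox matrix changes below, it is worth spelling out the header-capture technique that the new tests/tracing/test_trace_propagation.py (added above) relies on. A standalone sketch, assuming a placeholder DSN and target URL; the patched signature mirrors http.client.HTTPConnection.putheader, as the _mock_putheader fixture does.

from http.client import HTTPConnection

import requests
import sentry_sdk

captured = []
_original_putheader = HTTPConnection.putheader

def recording_putheader(self, header, *values):
    # record every outgoing header, then defer to the real implementation
    captured.append((header, values))
    return _original_putheader(self, header, *values)

HTTPConnection.putheader = recording_putheader
try:
    sentry_sdk.init(
        dsn="https://examplePublicKey@o0.ingest.sentry.io/0",  # placeholder DSN
        traces_sample_rate=1.0,
    )
    with sentry_sdk.start_span(op="test", name="test"):
        requests.get("http://example.com")  # placeholder target
finally:
    HTTPConnection.putheader = _original_putheader

# With an active span and the default trace_propagation_targets, the SDK
# should have injected trace headers into the outgoing request.
outgoing = {header for header, _ in captured}
assert "sentry-trace" in outgoing and "baggage" in outgoing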
+ # ~~~ Common ~~~ + {py3.7,py3.8,py3.9}-common-v1.4.1 + {py3.7,py3.8,py3.9,py3.10}-common-v1.13.0 + {py3.7,py3.8,py3.9,py3.10,py3.11}-common-v1.22.0 + {py3.8,py3.9,py3.10,py3.11,py3.12,py3.13}-common-v1.32.1 + + # ~~~ AI ~~~ {py3.8,py3.11,py3.12}-anthropic-v0.16.0 {py3.8,py3.11,py3.12}-anthropic-v0.27.0 @@ -154,17 +151,16 @@ envlist = # ~~~ DBs ~~~ {py3.7,py3.11,py3.12}-clickhouse_driver-v0.2.9 - {py3.6}-pymongo-v3.5.1 - {py3.6,py3.10,py3.11}-pymongo-v3.13.0 - {py3.6,py3.9,py3.10}-pymongo-v4.0.2 + {py3.7}-pymongo-v3.7.2 + {py3.7,py3.10,py3.11}-pymongo-v3.13.0 + {py3.7,py3.9,py3.10}-pymongo-v4.0.2 {py3.9,py3.12,py3.13}-pymongo-v4.12.0 - {py3.6}-redis_py_cluster_legacy-v1.3.6 - {py3.6,py3.7}-redis_py_cluster_legacy-v2.0.0 - {py3.6,py3.7,py3.8}-redis_py_cluster_legacy-v2.1.3 + {py3.7}-redis_py_cluster_legacy-v2.0.0 + {py3.7,py3.8}-redis_py_cluster_legacy-v2.1.3 - {py3.6,py3.8,py3.9}-sqlalchemy-v1.3.24 - {py3.6,py3.11,py3.12}-sqlalchemy-v1.4.54 + {py3.7,py3.8,py3.9}-sqlalchemy-v1.3.24 + {py3.7,py3.11,py3.12}-sqlalchemy-v1.4.54 {py3.7,py3.12,py3.13}-sqlalchemy-v2.0.40 @@ -192,11 +188,11 @@ envlist = {py3.8,py3.11,py3.12}-ariadne-v0.24.0 {py3.9,py3.12,py3.13}-ariadne-v0.26.2 - {py3.6,py3.9,py3.10}-gql-v3.4.1 + {py3.7,py3.9,py3.10}-gql-v3.4.1 {py3.7,py3.11,py3.12}-gql-v3.5.2 {py3.9,py3.12,py3.13}-gql-v3.6.0b4 - {py3.6,py3.9,py3.10}-graphene-v3.3 + {py3.7,py3.9,py3.10}-graphene-v3.3 {py3.8,py3.12,py3.13}-graphene-v3.4.3 {py3.8,py3.10,py3.11}-strawberry-v0.209.8 @@ -214,19 +210,19 @@ envlist = # ~~~ Tasks ~~~ - {py3.6,py3.7,py3.8}-celery-v4.4.7 - {py3.6,py3.7,py3.8}-celery-v5.0.5 + {py3.8}-celery-v4.4.7 + {py3.8}-celery-v5.0.5 {py3.8,py3.12,py3.13}-celery-v5.5.2 - {py3.6,py3.7}-dramatiq-v1.9.0 - {py3.6,py3.8,py3.9}-dramatiq-v1.12.3 + {py3.7}-dramatiq-v1.9.0 + {py3.7,py3.8,py3.9}-dramatiq-v1.12.3 {py3.7,py3.10,py3.11}-dramatiq-v1.15.0 {py3.8,py3.12,py3.13}-dramatiq-v1.17.1 - {py3.6,py3.7}-huey-v2.1.3 - {py3.6,py3.7}-huey-v2.2.0 - {py3.6,py3.7}-huey-v2.3.2 - {py3.6,py3.11,py3.12}-huey-v2.5.3 + {py3.7}-huey-v2.1.3 + {py3.7}-huey-v2.2.0 + {py3.7}-huey-v2.3.2 + {py3.7,py3.11,py3.12}-huey-v2.5.3 {py3.8,py3.9}-spark-v3.0.3 {py3.8,py3.9}-spark-v3.2.4 @@ -235,24 +231,24 @@ envlist = # ~~~ Web 1 ~~~ - {py3.6,py3.7}-django-v1.11.29 - {py3.6,py3.8,py3.9}-django-v2.2.28 - {py3.6,py3.9,py3.10}-django-v3.2.25 + {py3.7}-django-v2.0.13 + {py3.7,py3.8,py3.9}-django-v2.2.28 + {py3.7,py3.9,py3.10}-django-v3.2.25 {py3.8,py3.11,py3.12}-django-v4.2.20 {py3.10,py3.11,py3.12}-django-v5.0.14 {py3.10,py3.12,py3.13}-django-v5.2 - {py3.6,py3.7,py3.8}-flask-v1.1.4 + {py3.7,py3.8}-flask-v1.1.4 {py3.8,py3.12,py3.13}-flask-v2.3.3 {py3.8,py3.12,py3.13}-flask-v3.0.3 {py3.9,py3.12,py3.13}-flask-v3.1.0 - {py3.6,py3.9,py3.10}-starlette-v0.16.0 + {py3.7,py3.9,py3.10}-starlette-v0.16.0 {py3.7,py3.10,py3.11}-starlette-v0.26.1 {py3.8,py3.11,py3.12}-starlette-v0.36.3 {py3.9,py3.12,py3.13}-starlette-v0.46.2 - {py3.6,py3.9,py3.10}-fastapi-v0.79.1 + {py3.7,py3.9,py3.10}-fastapi-v0.79.1 {py3.7,py3.10,py3.11}-fastapi-v0.91.0 {py3.7,py3.10,py3.11}-fastapi-v0.103.2 {py3.8,py3.12,py3.13}-fastapi-v0.115.12 @@ -264,12 +260,11 @@ envlist = {py3.7,py3.9,py3.10}-aiohttp-v3.8.6 {py3.9,py3.12,py3.13}-aiohttp-v3.11.18 - {py3.6,py3.7}-bottle-v0.12.25 + {py3.7}-bottle-v0.12.25 {py3.8,py3.12,py3.13}-bottle-v0.13.3 - {py3.6}-falcon-v1.4.1 - {py3.6,py3.7}-falcon-v2.0.0 - {py3.6,py3.11,py3.12}-falcon-v3.1.3 + {py3.7,py3.8,py3.9}-falcon-v3.0.1 + {py3.7,py3.11,py3.12}-falcon-v3.1.3 {py3.8,py3.11,py3.12}-falcon-v4.0.2 {py3.8,py3.10,py3.11}-litestar-v2.0.1 @@ 
@@ -277,27 +272,25 @@ envlist =
     {py3.8,py3.11,py3.12}-litestar-v2.10.0
     {py3.8,py3.12,py3.13}-litestar-v2.15.2

-    {py3.6}-pyramid-v1.8.6
-    {py3.6,py3.8,py3.9}-pyramid-v1.10.8
-    {py3.6,py3.10,py3.11}-pyramid-v2.0.2
+    {py3.7,py3.8,py3.9}-pyramid-v1.10.8
+    {py3.7,py3.10,py3.11}-pyramid-v2.0.2

     {py3.8,py3.10,py3.11}-starlite-v1.48.1
     {py3.8,py3.10,py3.11}-starlite-v1.49.0
     {py3.8,py3.10,py3.11}-starlite-v1.50.2
     {py3.8,py3.10,py3.11}-starlite-v1.51.16

-    {py3.6,py3.7,py3.8}-tornado-v6.0.4
-    {py3.6,py3.8,py3.9}-tornado-v6.1
+    {py3.7,py3.8}-tornado-v6.0.4
+    {py3.7,py3.8,py3.9}-tornado-v6.1
     {py3.7,py3.9,py3.10}-tornado-v6.2
     {py3.8,py3.10,py3.11}-tornado-v6.4.2


     # ~~~ Misc ~~~
-    {py3.6,py3.12,py3.13}-loguru-v0.7.3
+    {py3.7,py3.12,py3.13}-loguru-v0.7.3

-    {py3.6}-trytond-v4.6.22
-    {py3.6}-trytond-v4.8.18
-    {py3.6,py3.7,py3.8}-trytond-v5.8.16
+    {py3.7}-trytond-v5.0.63
+    {py3.7,py3.8}-trytond-v5.8.16
     {py3.8,py3.10,py3.11}-trytond-v6.8.17
     {py3.8,py3.11,py3.12}-trytond-v7.0.30
     {py3.9,py3.12,py3.13}-trytond-v7.6.0
@@ -316,22 +309,13 @@ deps =
     linters: -r requirements-linting.txt
     linters: werkzeug<2.3.0

-    # === Common ===
-    py3.8-common: hypothesis
-    common: pytest-asyncio
-    # See https://github.com/pytest-dev/pytest/issues/9621
-    # and https://github.com/pytest-dev/pytest-forked/issues/67
-    # for justification of the upper bound on pytest
-    {py3.6,py3.7}-common: pytest<7.0.0
-    {py3.8,py3.9,py3.10,py3.11,py3.12,py3.13}-common: pytest
-
     # === Gevent ===
-    {py3.6,py3.7,py3.8,py3.9,py3.10,py3.11}-gevent: gevent>=22.10.0, <22.11.0
+    {py3.7,py3.8,py3.9,py3.10,py3.11}-gevent: gevent>=22.10.0, <22.11.0
     {py3.12}-gevent: gevent
     # See https://github.com/pytest-dev/pytest/issues/9621
     # and https://github.com/pytest-dev/pytest-forked/issues/67
     # for justification of the upper bound on pytest
-    {py3.6,py3.7}-gevent: pytest<7.0.0
+    py3.7-gevent: pytest<7.0.0
     {py3.8,py3.9,py3.10,py3.11,py3.12}-gevent: pytest

     # === Integrations ===
@@ -425,12 +409,6 @@ deps =
     openai-latest: tiktoken~=0.6.0
     openai-notiktoken: openai

-    # OpenTelemetry (OTel)
-    opentelemetry: opentelemetry-distro
-
-    # OpenTelemetry Experimental (POTel)
-    potel: -e .[opentelemetry-experimental]
-
     # pure_eval
     pure_eval: pure_eval
@@ -455,7 +433,7 @@ deps =
     # Redis
     redis: fakeredis!=1.7.4
     redis: pytest<8.0.0
-    {py3.6,py3.7}-redis: fakeredis!=2.26.0 # https://github.com/cunla/fakeredis-py/issues/341
+    py3.7-redis: fakeredis!=2.26.0 # https://github.com/cunla/fakeredis-py/issues/341
     {py3.7,py3.8,py3.9,py3.10,py3.11,py3.12,py3.13}-redis: pytest-asyncio
     redis-v3: redis~=3.0
     redis-v4: redis~=4.0
@@ -467,13 +445,11 @@ deps =
     # RQ (Redis Queue)
     # https://github.com/jamesls/fakeredis/issues/245
-    rq-v{0.6}: fakeredis<1.0
-    rq-v{0.6}: redis<3.2.2
     rq-v{0.13,1.0,1.5,1.10}: fakeredis>=1.0,<1.7.4
     rq-v{1.15,1.16}: fakeredis<2.28.0
-    {py3.6,py3.7}-rq-v{1.15,1.16}: fakeredis!=2.26.0 # https://github.com/cunla/fakeredis-py/issues/341
+    py3.7-rq-v{1.15,1.16}: fakeredis!=2.26.0 # https://github.com/cunla/fakeredis-py/issues/341
     rq-latest: fakeredis<2.28.0
-    {py3.6,py3.7}-rq-latest: fakeredis!=2.26.0 # https://github.com/cunla/fakeredis-py/issues/341
+    py3.7-rq-latest: fakeredis!=2.26.0 # https://github.com/cunla/fakeredis-py/issues/341
     rq-v0.6: rq~=0.6.0
     rq-v0.13: rq~=0.13.0
     rq-v1.0: rq~=1.0.0
@@ -488,7 +464,6 @@ deps =
     sanic: aiohttp
     sanic-v{24.6}: sanic_testing
     sanic-latest: sanic_testing
-    {py3.6}-sanic: aiocontextvars==0.2.1
     sanic-v0.8: sanic~=0.8.0
     sanic-v20: sanic~=20.0
     sanic-v24.6: sanic~=24.6.0
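[note] In the deps entries above, the factor prefix before the colon restricts a dependency to environments whose names contain all of those factors; that is why the py3.6-specific pins can simply be dropped and the remaining py3.7 conditions tightened. An illustrative check, with env and factor names taken from the hunks above (--notest builds the env and installs dependencies without running tests):

    # py3.7-redis-v3 matches the `redis:` and `py3.7-redis:` lines, so it gets
    # fakeredis!=1.7.4, pytest<8.0.0 and fakeredis!=2.26.0; py3.12-redis-v5
    # matches only the unconditional `redis:` lines.
    tox -e py3.7-redis-v3 --notest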
@@ -498,6 +473,17 @@ deps =
     # These come from the populate_tox.py script. Eventually we should move all
     # integration tests there.

+    # ~~~ Common ~~~
+    common-v1.4.1: opentelemetry-sdk==1.4.1
+    common-v1.13.0: opentelemetry-sdk==1.13.0
+    common-v1.22.0: opentelemetry-sdk==1.22.0
+    common-v1.32.1: opentelemetry-sdk==1.32.1
+    common: pytest
+    common: pytest-asyncio
+    py3.7-common: pytest<7.0.0
+    py3.8-common: hypothesis
+
+
     # ~~~ AI ~~~
     anthropic-v0.16.0: anthropic==0.16.0
     anthropic-v0.27.0: anthropic==0.27.0
@@ -522,13 +508,12 @@ deps =

     # ~~~ DBs ~~~
     clickhouse_driver-v0.2.9: clickhouse-driver==0.2.9

-    pymongo-v3.5.1: pymongo==3.5.1
+    pymongo-v3.7.2: pymongo==3.7.2
     pymongo-v3.13.0: pymongo==3.13.0
     pymongo-v4.0.2: pymongo==4.0.2
     pymongo-v4.12.0: pymongo==4.12.0
     pymongo: mockupdb

-    redis_py_cluster_legacy-v1.3.6: redis-py-cluster==1.3.6
     redis_py_cluster_legacy-v2.0.0: redis-py-cluster==2.0.0
     redis_py_cluster_legacy-v2.1.3: redis-py-cluster==2.1.3
@@ -605,7 +590,6 @@ deps =
     celery-v5.5.2: celery==5.5.2
     celery: newrelic
     celery: redis
-    py3.7-celery: importlib-metadata<5.0

     dramatiq-v1.9.0: dramatiq==1.9.0
     dramatiq-v1.12.3: dramatiq==1.12.3
@@ -624,12 +608,13 @@ deps =

     # ~~~ Web 1 ~~~
-    django-v1.11.29: django==1.11.29
+    django-v2.0.13: django==2.0.13
     django-v2.2.28: django==2.2.28
     django-v3.2.25: django==3.2.25
     django-v4.2.20: django==4.2.20
     django-v5.0.14: django==5.0.14
     django-v5.2: django==5.2
+    django: channels[daphne]
     django: psycopg2-binary
     django: djangorestframework
     django: pytest-django
@@ -639,19 +624,14 @@ deps =
     django-v5.0.14: pytest-asyncio
     django-v5.2: pytest-asyncio
     django-v2.2.28: six
-    django-v1.11.29: djangorestframework>=3.0,<4.0
-    django-v1.11.29: Werkzeug<2.1.0
+    django-v2.0.13: djangorestframework>=3.0,<4.0
+    django-v2.0.13: Werkzeug<2.1.0
     django-v2.2.28: djangorestframework>=3.0,<4.0
     django-v2.2.28: Werkzeug<2.1.0
     django-v3.2.25: djangorestframework>=3.0,<4.0
     django-v3.2.25: Werkzeug<2.1.0
-    django-v1.11.29: pytest-django<4.0
+    django-v2.0.13: pytest-django<4.0
     django-v2.2.28: pytest-django<4.0
-    django-v2.2.28: channels[daphne]
-    django-v3.2.25: channels[daphne]
-    django-v4.2.20: channels[daphne]
-    django-v5.0.14: channels[daphne]
-    django-v5.2: channels[daphne]

     flask-v1.1.4: flask==1.1.4
     flask-v2.3.3: flask==2.3.3
@@ -705,8 +685,7 @@ deps =
     bottle-v0.13.3: bottle==0.13.3
     bottle: werkzeug<2.1.0

-    falcon-v1.4.1: falcon==1.4.1
-    falcon-v2.0.0: falcon==2.0.0
+    falcon-v3.0.1: falcon==3.0.1
     falcon-v3.1.3: falcon==3.1.3
     falcon-v4.0.2: falcon==4.0.2
@@ -721,7 +700,6 @@ deps =
     litestar-v2.0.1: httpx<0.28
     litestar-v2.5.5: httpx<0.28

-    pyramid-v1.8.6: pyramid==1.8.6
     pyramid-v1.10.8: pyramid==1.10.8
     pyramid-v2.0.2: pyramid==2.0.2
     pyramid: werkzeug<2.1.0
@@ -751,15 +729,13 @@ deps =
     # ~~~ Misc ~~~
     loguru-v0.7.3: loguru==0.7.3

-    trytond-v4.6.22: trytond==4.6.22
-    trytond-v4.8.18: trytond==4.8.18
+    trytond-v5.0.63: trytond==5.0.63
     trytond-v5.8.16: trytond==5.8.16
     trytond-v6.8.17: trytond==6.8.17
     trytond-v7.0.30: trytond==7.0.30
     trytond-v7.6.0: trytond==7.6.0
     trytond: werkzeug
-    trytond-v4.6.22: werkzeug<1.0
-    trytond-v4.8.18: werkzeug<1.0
+    trytond-v5.0.63: werkzeug<1.0

     typer-v0.15.3: typer==0.15.3
@@ -769,9 +745,9 @@ setenv =
     PYTHONDONTWRITEBYTECODE=1
     OBJC_DISABLE_INITIALIZE_FORK_SAFETY=YES
     COVERAGE_FILE=.coverage-sentry-{envname}
-    py3.6: COVERAGE_RCFILE=.coveragerc36

     django: DJANGO_SETTINGS_MODULE=tests.integrations.django.myapp.settings
+    py3.12-django: PIP_CONSTRAINT=constraints.txt

     common: TESTPATH=tests
     gevent: TESTPATH=tests
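[note] With the py3.6-only COVERAGE_RCFILE line gone, every environment now uses the same coverage configuration and, per COVERAGE_FILE=.coverage-sentry-{envname} above, writes its own data file. A small sketch of combining per-env results locally (assumes coverage.py is installed):

    # Each env leaves a file such as .coverage-sentry-py3.8-requests;
    # merge them and emit an XML report:
    coverage combine .coverage-sentry-*
    coverage xml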
@@ -808,8 +784,6 @@ setenv =
     loguru: TESTPATH=tests/integrations/loguru
     openai: TESTPATH=tests/integrations/openai
     openfeature: TESTPATH=tests/integrations/openfeature
-    opentelemetry: TESTPATH=tests/integrations/opentelemetry
-    potel: TESTPATH=tests/integrations/opentelemetry
     pure_eval: TESTPATH=tests/integrations/pure_eval
     pymongo: TESTPATH=tests/integrations/pymongo
     pyramid: TESTPATH=tests/integrations/pyramid
@@ -847,7 +821,6 @@ extras =
     pymongo: pymongo

 basepython =
-    py3.6: python3.6
     py3.7: python3.7
     py3.8: python3.8
     py3.9: python3.9
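[note] The basepython mapping is what resolves a py3.X factor to a concrete interpreter, so removing the py3.6 entry is the last piece of the removal: no environment can request python3.6 any more. A quick, hedged way to confirm which interpreter an environment resolves to (env name from the envlist above; assumes python3.8 on PATH):

    # Build the env without running tests, then inspect the venv it created:
    tox -e py3.8-requests --notest
    .tox/py3.8-requests/bin/python --version   # should report Python 3.8.x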