From b9f8c0fd4ed9359415a3221efb993ea4bf978aba Mon Sep 17 00:00:00 2001 From: Rey Abolofia Date: Tue, 13 Feb 2024 12:28:46 -0800 Subject: [PATCH 01/18] Replace nose2 with pytest. (#416) --- .github/workflows/build.yml | 2 +- .gitignore | 1 - LICENSE-3rdparty.csv | 3 +-- pyproject.toml | 6 +++--- scripts/run_tests.sh | 2 +- 5 files changed, 6 insertions(+), 8 deletions(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 7092388b..5df92704 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -62,7 +62,7 @@ jobs: - name: Run tests run: | source venv/bin/activate - nose2 -v + pytest -vv integration-test: runs-on: ubuntu-latest diff --git a/.gitignore b/.gitignore index d6b01dbf..3aeeb1fb 100644 --- a/.gitignore +++ b/.gitignore @@ -28,7 +28,6 @@ pip-log.txt # Unit test / coverage reports .coverage .tox -nosetests.xml #Misc .cache/ diff --git a/LICENSE-3rdparty.csv b/LICENSE-3rdparty.csv index e15ac6e5..a6692772 100644 --- a/LICENSE-3rdparty.csv +++ b/LICENSE-3rdparty.csv @@ -1,4 +1,3 @@ Component,Origin,License,Copyright flake8,gitlab.com/pycqa/flake8,MIT,"Copyright (C) 2011-2013 Tarek Ziade . Copyright (C) 2012-2016 Ian Cordasco ." -nose2,github.com/nose-devs/nose2,BSD-2-Clause,"Copyright (c) 2012, Jason Pellerin. All rights reserved." -wrapt,github.com/GrahamDumpleton/wrapt,BSD-2-Clause,"Copyright (c) 2013-2019, Graham Dumpleton" \ No newline at end of file +wrapt,github.com/GrahamDumpleton/wrapt,BSD-2-Clause,"Copyright (c) 2013-2019, Graham Dumpleton" diff --git a/pyproject.toml b/pyproject.toml index 82685b98..5dbd9e5b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -36,16 +36,16 @@ importlib_metadata = {version = "*", python = "<3.8"} boto3 = { version = "^1.28.0", optional = true } typing_extensions = {version = "^4.0", python = "<3.8"} requests = { version ="^2.22.0", optional = true } -nose2 = { version= "^0.9.1", optional = true } +pytest = { version= "^8.0.0", optional = true } flake8 = { version = "^5.0.4", optional = true } [tool.poetry.extras] dev = [ "boto3", - "requests", - "nose2", "flake8", + "pytest", + "requests", ] [build-system] diff --git a/scripts/run_tests.sh b/scripts/run_tests.sh index e06e9912..540ac898 100755 --- a/scripts/run_tests.sh +++ b/scripts/run_tests.sh @@ -18,7 +18,7 @@ do --build-arg python_version=$python_version docker run -w /test \ datadog-lambda-python-test:$python_version \ - nose2 -v + pytest -vv docker run -w /test \ datadog-lambda-python-test:$python_version \ flake8 datadog_lambda/ From 17c8e0be8f12bec1efc7558aa954e2522b10f33d Mon Sep 17 00:00:00 2001 From: Rey Abolofia Date: Wed, 14 Feb 2024 12:00:20 -0800 Subject: [PATCH 02/18] Prevent function crash when setting unknown log level. 
(#418)
---
 datadog_lambda/__init__.py |  6 ++----
 datadog_lambda/logger.py   | 27 ++++++++++++++++++++++++
 tests/test_logger.py       | 43 ++++++++++++++++++++++++++++++++++++++
 3 files changed, 72 insertions(+), 4 deletions(-)
 create mode 100644 datadog_lambda/logger.py
 create mode 100644 tests/test_logger.py

diff --git a/datadog_lambda/__init__.py b/datadog_lambda/__init__.py
index 20b42443..cfb5f8e3 100644
--- a/datadog_lambda/__init__.py
+++ b/datadog_lambda/__init__.py
@@ -1,6 +1,5 @@
-import os
-import logging
 from datadog_lambda.cold_start import initialize_cold_start_tracing
+from datadog_lambda.logger import initialize_logging
 
 initialize_cold_start_tracing()
 
@@ -13,5 +12,4 @@
 
 __version__ = importlib_metadata.version(__name__)
 
-logger = logging.getLogger(__name__)
-logger.setLevel(logging.getLevelName(os.environ.get("DD_LOG_LEVEL", "INFO").upper()))
+initialize_logging(__name__)
diff --git a/datadog_lambda/logger.py b/datadog_lambda/logger.py
new file mode 100644
index 00000000..ae2b5d6d
--- /dev/null
+++ b/datadog_lambda/logger.py
@@ -0,0 +1,27 @@
+import logging
+import os
+
+try:
+    _level_mapping = logging.getLevelNamesMapping()
+except AttributeError:
+    # logging.getLevelNamesMapping() was only added in Python 3.11
+    _level_mapping = {name: num for num, name in logging._levelToName.items()}
+# https://docs.datadoghq.com/agent/troubleshooting/debug_mode/?tab=agentv6v7#agent-log-level
+_level_mapping.update(
+    {
+        "TRACE": 5,
+        "WARN": logging.WARNING,
+        "OFF": 100,
+    }
+)
+
+
+def initialize_logging(name):
+    logger = logging.getLogger(name)
+    str_level = (os.environ.get("DD_LOG_LEVEL") or "INFO").upper()
+    level = _level_mapping.get(str_level)
+    if level is None:
+        logger.setLevel(logging.INFO)
+        logger.warning("Invalid log level: %s. Defaulting to INFO", str_level)
+    else:
+        logger.setLevel(level)
diff --git a/tests/test_logger.py b/tests/test_logger.py
new file mode 100644
index 00000000..eb2822f2
--- /dev/null
+++ b/tests/test_logger.py
@@ -0,0 +1,43 @@
+import io
+import logging
+import pytest
+
+from datadog_lambda.logger import initialize_logging
+
+_test_initialize_logging = (
+    ("TRACE", (10, 20, 30, 40, 50)),
+    ("DEBUG", (10, 20, 30, 40, 50)),
+    ("debug", (10, 20, 30, 40, 50)),
+    ("INFO", (20, 30, 40, 50)),
+    ("WARNING", (30, 40, 50)),
+    ("WARN", (30, 40, 50)),
+    ("ERROR", (40, 50)),
+    ("CRITICAL", (50,)),
+    ("OFF", ()),
+    ("", (20, 30, 40, 50)),
+    (None, (20, 30, 40, 50)),
+    ("PURPLE", (30, 20, 30, 40, 50)),  # log warning then default to INFO
+)
+
+
+@pytest.mark.parametrize("level,logged_levels", _test_initialize_logging)
+def test_initialize_logging(level, logged_levels, monkeypatch):
+    if level is not None:
+        monkeypatch.setenv("DD_LOG_LEVEL", level)
+
+    stream = io.StringIO()
+    handler = logging.StreamHandler(stream)
+    handler.setFormatter(logging.Formatter("%(levelno)s"))
+    logger = logging.getLogger(__name__)
+    logger.addHandler(handler)
+
+    initialize_logging(__name__)
+
+    logger.debug("debug")
+    logger.info("info")
+    logger.warning("warning")
+    logger.error("error")
+    logger.critical("critical")
+
+    logged = tuple(map(int, stream.getvalue().strip().split()))
+    assert logged == logged_levels
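The old initializer passed the result of `logging.getLevelName()` straight to `setLevel()`; for an unrecognized name, `getLevelName("PURPLE")` returns the string `"Level PURPLE"`, which `setLevel()` rejects with a `ValueError` at import time, crashing the function. A minimal sketch of the new fallback behavior (assumes the package is importable locally; `PURPLE` stands in for any unrecognized value):

```python
import logging
import os

from datadog_lambda.logger import initialize_logging

os.environ["DD_LOG_LEVEL"] = "TRACE"   # Datadog-specific name, maps to level 5
initialize_logging("demo")
assert logging.getLogger("demo").level == 5

os.environ["DD_LOG_LEVEL"] = "PURPLE"  # unrecognized: warns and keeps INFO
initialize_logging("demo")
assert logging.getLogger("demo").level == logging.INFO
```

The extra names (`TRACE`, `WARN`, `OFF`) mirror the agent log levels documented at the URL referenced in `logger.py`.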
From 1649656f63b1e25942639f2cc220ebbc9dcd3f33 Mon Sep 17 00:00:00 2001
From: AJ Stuyvenberg
Date: Mon, 4 Mar 2024 13:52:00 -0500
Subject: [PATCH 03/18] feat: Automatic layer releases (#421)

* feat: Copy gitlab-ci from datadog-lambda-js

* feat: first crack at migrating the build script for individual publishing

* feat: secrets for python. TODO - actually create them

* feat: Add python runtimes

* feat: re-organize architecture flags. Support separate arch/name

* feat: first draft of build generator template

* feat: Add datasources and first cut at publish_pypi script

* feat: oops, no node. TODO: fix container names

* feat: pass the right layer name to sign layers

* feat: Python before script

* feat: arch parameterized sign layer

* feat: fix up runtimes

* fix: build layer arch

* fix: Check layer size arch

* feat: zip file is py not python

* fix: fix up check layer size script

* hotfix: use js ssm secrets until I can figure out which AWS account CI runs in and can add them

* feat: Less uniformity on images in python, specify in runtimes.yaml

* feat: Can't use permissions across repos

* feat: lol our ci runners set DD_SERVICE so it breaks our unit tests

* feat: Gotta add yarn so we can add serverless

* feat: lint

* feat: Break install-node into separate task

* feat: Use name instead of python_version

* fix: no python-, just the version number

* empty commit to bump CI

* feat: integration tests should run for both architectures

* fix: arg, no arch in integration test

* fix: pass sls framework the proper arch

* feat: fix script

* feat: Default to x86 just so the invoke function works

* fix: nvm pass the sls arch everywhere I guess

* fix: strip arch from user agent

* feat: the right x86_64 arch for serverless framework

* fix: globalize local env

* feat: fix regex

* fix: ints should pass now

* feat: Update tests

* fix: lint

* fix: lint

* feat: lint

* feat: I think we just need one lint

* feat: remove install node for publish step

* feat: remove integration tests from github build, we run them in gitlab now

* feat: token applied
---
 .github/workflows/build.yml                   |  71 -------
 .gitlab-ci.yml                                |  29 +++
 ci/config.yaml                                |  13 ++
 ci/datasources/environments.yaml              |   9 +
 ci/datasources/regions.yaml                   |  29 +++
 ci/datasources/runtimes.yaml                  |  41 ++++
 ci/get_secrets.sh                             |  48 +++++
 ci/input_files/build.yaml.tpl                 | 178 +++++++++++++++++
 ci/publish_layers.sh                          | 182 ++++++++++++++++++
 ci/publish_pypi.sh                            |  23 +++
 scripts/check_layer_size.sh                   |  40 ++--
 scripts/run_integration_tests.sh              |  17 +-
 tests/integration/serverless.yml              |   3 +-
 .../snapshots/logs/sync-metrics_python310.log |  36 ++--
 .../snapshots/logs/sync-metrics_python311.log |  36 ++--
 .../snapshots/logs/sync-metrics_python312.log |  36 ++--
 .../snapshots/logs/sync-metrics_python38.log  |  36 ++--
 .../snapshots/logs/sync-metrics_python39.log  |  36 ++--
 tests/test_tracing.py                         |  19 +-
 tests/test_wrapper.py                         |   4 +
 20 files changed, 691 insertions(+), 195 deletions(-)
 create mode 100644 .gitlab-ci.yml
 create mode 100644 ci/config.yaml
 create mode 100644 ci/datasources/environments.yaml
 create mode 100644 ci/datasources/regions.yaml
 create mode 100644 ci/datasources/runtimes.yaml
 create mode 100755 ci/get_secrets.sh
 create mode 100644 ci/input_files/build.yaml.tpl
 create mode 100755 ci/publish_layers.sh
 create mode 100755 ci/publish_pypi.sh

diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml
index 5df92704..eeb77064 100644
--- a/.github/workflows/build.yml
+++ b/.github/workflows/build.yml
@@ -63,74 +63,3 @@ jobs:
       run: |
         source venv/bin/activate
         pytest -vv
-
-  integration-test:
-    runs-on: ubuntu-latest
-    strategy:
-      matrix:
-        runtime-param: ['3.8', '3.9', '3.10', '3.11', '3.12']
-    steps:
-      - name: Checkout
-        uses: actions/checkout@v3
-
-      - name: Set up Node 14
-        uses: actions/setup-node@v3
-        with:
-          node-version: 14
-
-      - name: Cache Node modules
-        id: cache-node-modules
-        uses: actions/cache@v3
-        
with: - path: "**/node_modules" - key: ${{ runner.os }}-modules-${{ hashFiles('**/yarn.lock') }} - - - name: Set up Python - uses: actions/setup-python@v4 - with: - python-version: 3.9 - - - name: Install Python dependencies - run: | - pip install virtualenv - virtualenv venv - source venv/bin/activate - pip install .[dev] - - - name: Install Serverless Framework - run: sudo yarn global add serverless@^3.7.0 --prefix /usr/local - - name: Install Crossbuild Deps - run: | - sudo apt-get update --allow-releaseinfo-change --fix-missing - sudo apt install -y qemu-user-static binfmt-support - - - name: Install dependencies - if: steps.cache-node-modules.outputs.cache-hit != 'true' - working-directory: tests/integration - run: yarn install - - - name: Run tests - env: - BUILD_LAYERS: true - DD_API_KEY: ${{ secrets.DD_API_KEY }} - AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} - AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} - RUNTIME_PARAM: ${{ matrix.runtime-param }} - run: ./scripts/run_integration_tests.sh - - - name: Send success metric - env: - DD_API_KEY: ${{ secrets.DD_API_KEY }} - run: ./scripts/send_status_metric.sh 0 $DD_API_KEY - - integration-test-failure: - runs-on: ubuntu-latest - needs: [integration-test] - if: always() && (needs.integration-test.result == 'failure') - steps: - - name: Checkout - uses: actions/checkout@v3 - - name: Send a failure metric - env: - DD_API_KEY: ${{ secrets.DD_API_KEY }} - run: ./scripts/send_status_metric.sh 1 $DD_API_KEY diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml new file mode 100644 index 00000000..0f36a781 --- /dev/null +++ b/.gitlab-ci.yml @@ -0,0 +1,29 @@ +stages: + - pre + - build + +.go-cache: &go-cache + key: datadog-lambda-python-go-cache + policy: pull + +generator: + stage: pre + image: registry.ddbuild.io/images/mirror/golang:alpine + tags: ["arch:amd64"] + cache: *go-cache + script: + - apk add --no-cache gomplate + - gomplate --config ci/config.yaml + artifacts: + paths: + - ci/*-pipeline.yaml + +build-layers: + stage: build + trigger: + include: + - artifact: ci/build-pipeline.yaml + job: generator + strategy: depend + rules: + - when: on_success diff --git a/ci/config.yaml b/ci/config.yaml new file mode 100644 index 00000000..d37a0f31 --- /dev/null +++ b/ci/config.yaml @@ -0,0 +1,13 @@ +inputFiles: + - ci/input_files/build.yaml.tpl + +outputFiles: + - ci/build-pipeline.yaml + +datasources: + runtimes: + url: ci/datasources/runtimes.yaml + regions: + url: ci/datasources/regions.yaml + environments: + url: ci/datasources/environments.yaml diff --git a/ci/datasources/environments.yaml b/ci/datasources/environments.yaml new file mode 100644 index 00000000..90056ab0 --- /dev/null +++ b/ci/datasources/environments.yaml @@ -0,0 +1,9 @@ +environments: + - name: sandbox + external_id: sandbox-publish-externalid + role_to_assume: sandbox-layer-deployer + account: 425362996713 + - name: prod + external_id: prod-publish-externalid + role_to_assume: dd-serverless-layer-deployer-role + account: 464622532012 diff --git a/ci/datasources/regions.yaml b/ci/datasources/regions.yaml new file mode 100644 index 00000000..a26372d8 --- /dev/null +++ b/ci/datasources/regions.yaml @@ -0,0 +1,29 @@ +regions: + - code: "us-east-1" + - code: "us-east-2" + - code: "us-west-1" + - code: "us-west-2" + - code: "af-south-1" + - code: "ap-east-1" + - code: "ap-south-1" + - code: "ap-south-2" + - code: "ap-southeast-1" + - code: "ap-southeast-2" + - code: "ap-southeast-3" + - code: "ap-southeast-4" + - code: "ap-northeast-1" + - code: "ap-northeast-2" 
+  - code: "ap-northeast-3"
+  - code: "ca-central-1"
+# - code: "ca-west-1" we don't support it
+  - code: "eu-central-1"
+  - code: "eu-central-2"
+  - code: "eu-west-1"
+  - code: "eu-west-2"
+  - code: "eu-west-3"
+  - code: "eu-south-1"
+  - code: "eu-south-2"
+# - code: "il-central-1" we don't support it
+  - code: "me-south-1"
+  - code: "me-central-1"
+  - code: "sa-east-1"
diff --git a/ci/datasources/runtimes.yaml b/ci/datasources/runtimes.yaml
new file mode 100644
index 00000000..0e084b08
--- /dev/null
+++ b/ci/datasources/runtimes.yaml
@@ -0,0 +1,41 @@
+runtimes:
+  - name: "python38"
+    python_version: "3.8"
+    arch: "amd64"
+    image: "3.8"
+  - name: "python38"
+    python_version: "3.8"
+    arch: "arm64"
+    image: "3.8"
+  - name: "python39"
+    python_version: "3.9"
+    arch: "amd64"
+    image: "3.9"
+  - name: "python39"
+    python_version: "3.9"
+    arch: "arm64"
+    image: "3.9"
+  - name: "python310"
+    python_version: "3.10"
+    arch: "amd64"
+    image: "3.10"
+  - name: "python310"
+    python_version: "3.10"
+    arch: "arm64"
+    image: "3.10"
+  - name: "python311"
+    python_version: "3.11"
+    arch: "amd64"
+    image: "3.11.6"
+  - name: "python311"
+    python_version: "3.11"
+    arch: "arm64"
+    image: "3.11.6"
+  - name: "python312"
+    python_version: "3.12"
+    arch: "amd64"
+    image: "3.12.0"
+  - name: "python312"
+    python_version: "3.12"
+    arch: "arm64"
+    image: "3.12.0"
diff --git a/ci/get_secrets.sh b/ci/get_secrets.sh
new file mode 100755
index 00000000..9d9c957c
--- /dev/null
+++ b/ci/get_secrets.sh
@@ -0,0 +1,48 @@
+#!/bin/bash
+
+# Unless explicitly stated otherwise all files in this repository are licensed
+# under the Apache License Version 2.0.
+# This product includes software developed at Datadog (https://www.datadoghq.com/).
+# Copyright 2023 Datadog, Inc.
+
+set -e
+
+if [ -z "$EXTERNAL_ID_NAME" ]; then
+    printf "[Error] No EXTERNAL_ID_NAME found.\n"
+    printf "Exiting script...\n"
+    exit 1
+fi
+
+if [ -z "$ROLE_TO_ASSUME" ]; then
+    printf "[Error] No ROLE_TO_ASSUME found.\n"
+    printf "Exiting script...\n"
+    exit 1
+fi
+
+printf "Getting AWS External ID...\n"
+
+EXTERNAL_ID=$(aws ssm get-parameter \
+    --region us-east-1 \
+    --name "ci.datadog-lambda-python.$EXTERNAL_ID_NAME" \
+    --with-decryption \
+    --query "Parameter.Value" \
+    --out text)
+
+printf "Getting DD API KEY...\n"
+
+export DD_API_KEY=$(aws ssm get-parameter \
+    --region us-east-1 \
+    --name ci.datadog-lambda-python.dd-api-key \
+    --with-decryption \
+    --query "Parameter.Value" \
+    --out text)
+
+printf "Assuming role...\n"
+
+export $(printf "AWS_ACCESS_KEY_ID=%s AWS_SECRET_ACCESS_KEY=%s AWS_SESSION_TOKEN=%s" \
+    $(aws sts assume-role \
+        --role-arn "arn:aws:iam::$AWS_ACCOUNT:role/$ROLE_TO_ASSUME" \
+        --role-session-name "ci.datadog-lambda-python-$CI_JOB_ID-$CI_JOB_STAGE" \
+        --query "Credentials.[AccessKeyId,SecretAccessKey,SessionToken]" \
+        --external-id $EXTERNAL_ID \
+        --output text))
diff --git a/ci/input_files/build.yaml.tpl b/ci/input_files/build.yaml.tpl
new file mode 100644
index 00000000..e562092d
--- /dev/null
+++ b/ci/input_files/build.yaml.tpl
@@ -0,0 +1,178 @@
+stages:
+ - build
+ - test
+ - sign
+ - publish
+
+.python-before-script: &python-before-script
+  - pip install virtualenv
+  - virtualenv venv
+  - source venv/bin/activate
+  - pip install .[dev]
+
+# This is for the serverless framework
+.install-node: &install-node
+  - apt-get update
+  - apt-get install -y ca-certificates curl gnupg xxd
+  - mkdir -p /etc/apt/keyrings
+  - curl -fsSL https://deb.nodesource.com/gpgkey/nodesource-repo.gpg.key | gpg --dearmor -o
/etc/apt/keyrings/nodesource.gpg
+  # We are explicitly setting the node_18.x version for the installation
+  - echo "deb [signed-by=/etc/apt/keyrings/nodesource.gpg] https://deb.nodesource.com/node_18.x nodistro main" | tee /etc/apt/sources.list.d/nodesource.list
+  - apt-get update
+  - apt-get install nodejs -y
+  - npm install --global yarn
+
+{{ range $runtime := (ds "runtimes").runtimes }}
+
+# TODO(astuyve) - figure out python build cache
+.{{ $runtime.name }}-{{ $runtime.arch }}-cache: &{{ $runtime.name }}-{{ $runtime.arch }}-cache
+  key: "$CI_JOB_STAGE-$CI_COMMIT_REF_SLUG"
+  paths:
+    - $CI_PROJECT_DIR/.yarn-cache
+  policy: pull
+
+build-layer ({{ $runtime.name }}-{{ $runtime.arch }}):
+  stage: build
+  tags: ["arch:amd64"]
+  image: registry.ddbuild.io/images/docker:20.10
+  artifacts:
+    expire_in: 1 hr # Unsigned zips expire in 1 hour
+    paths:
+      - .layers/datadog_lambda_py-{{ $runtime.arch }}-{{ $runtime.python_version }}.zip
+  variables:
+    CI_ENABLE_CONTAINER_IMAGE_BUILDS: "true"
+  script:
+    - PYTHON_VERSION={{ $runtime.python_version }} ARCH={{ $runtime.arch }} ./scripts/build_layers.sh

+check-layer-size ({{ $runtime.name }}-{{ $runtime.arch }}):
+  stage: test
+  tags: ["arch:amd64"]
+  image: registry.ddbuild.io/images/docker:20.10
+  needs:
+    - build-layer ({{ $runtime.name }}-{{ $runtime.arch }})
+  dependencies:
+    - build-layer ({{ $runtime.name }}-{{ $runtime.arch }})
+  script:
+    - PYTHON_VERSION={{ $runtime.python_version }} ARCH={{ $runtime.arch }} ./scripts/check_layer_size.sh
+
+lint python:
+  stage: test
+  tags: ["arch:amd64"]
+  image: registry.ddbuild.io/images/mirror/python:{{ $runtime.image }}
+  cache: *{{ $runtime.name }}-{{ $runtime.arch }}-cache
+  before_script: *python-before-script
+  script:
+    - source venv/bin/activate
+    - ./scripts/check_format.sh
+
+unit-test ({{ $runtime.name }}-{{ $runtime.arch }}):
+  stage: test
+  tags: ["arch:amd64"]
+  image: registry.ddbuild.io/images/mirror/python:{{ $runtime.image }}
+  cache: *{{ $runtime.name }}-{{ $runtime.arch }}-cache
+  before_script: *python-before-script
+  script:
+    - source venv/bin/activate
+    - pytest -vv
+
+integration-test ({{ $runtime.name }}-{{ $runtime.arch }}):
+  stage: test
+  tags: ["arch:amd64"]
+  image: registry.ddbuild.io/images/docker:20.10-py3
+  needs:
+    - build-layer ({{ $runtime.name }}-{{ $runtime.arch }})
+  dependencies:
+    - build-layer ({{ $runtime.name }}-{{ $runtime.arch }})
+  cache: *{{ $runtime.name }}-{{ $runtime.arch }}-cache
+  variables:
+    CI_ENABLE_CONTAINER_IMAGE_BUILDS: "true"
+  before_script:
+    - *install-node
+    - EXTERNAL_ID_NAME=integration-test-externalid ROLE_TO_ASSUME=sandbox-integration-test-deployer AWS_ACCOUNT=425362996713 source ./ci/get_secrets.sh
+    - yarn global add serverless --prefix /usr/local
+    - cd integration_tests && yarn install && cd ..
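+  # The unsigned layer zip built in build-layer is downloaded into .layers/
+  # via the dependencies declaration above; tests/integration/serverless.yml
+  # packages it from there.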
+  script:
+    - RUNTIME_PARAM={{ $runtime.python_version }} ARCH={{ $runtime.arch }} ./scripts/run_integration_tests.sh
+
+{{ range $environment := (ds "environments").environments }}
+
+{{ if or (eq $environment.name "prod") }}
+sign-layer ({{ $runtime.name }}-{{ $runtime.arch }}):
+  stage: sign
+  tags: ["arch:amd64"]
+  image: registry.ddbuild.io/images/docker:20.10-py3
+  rules:
+    - if: '$CI_COMMIT_TAG =~ /^v.*/'
+      when: manual
+  needs:
+    - build-layer ({{ $runtime.name }}-{{ $runtime.arch }})
+    - check-layer-size ({{ $runtime.name }}-{{ $runtime.arch }})
+    - lint python
+    - unit-test ({{ $runtime.name }}-{{ $runtime.arch }})
+    - integration-test ({{ $runtime.name }}-{{ $runtime.arch }})
+  dependencies:
+    - build-layer ({{ $runtime.name }}-{{ $runtime.arch }})
+  artifacts: # Re-specify artifacts so the modified signed file is passed
+    expire_in: 1 day # Signed layers should expire after 1 day
+    paths:
+      - .layers/datadog_lambda_py-{{ $runtime.arch }}-{{ $runtime.python_version }}.zip
+  before_script:
+    - apt-get update
+    - apt-get install -y uuid-runtime
+    - EXTERNAL_ID_NAME={{ $environment.external_id }} ROLE_TO_ASSUME={{ $environment.role_to_assume }} AWS_ACCOUNT={{ $environment.account }} source ./ci/get_secrets.sh
+  script:
+    - LAYER_FILE=datadog_lambda_py-{{ $runtime.arch }}-{{ $runtime.python_version }}.zip ./scripts/sign_layers.sh {{ $environment.name }}
+{{ end }}
+
+publish-layer-{{ $environment.name }} ({{ $runtime.name }}-{{ $runtime.arch }}):
+  stage: publish
+  tags: ["arch:amd64"]
+  image: registry.ddbuild.io/images/docker:20.10-py3
+  rules:
+    - if: '"{{ $environment.name }}" =~ /^(sandbox|staging)/'
+      when: manual
+      allow_failure: true
+    - if: '$CI_COMMIT_TAG =~ /^v.*/'
+  needs:
+{{ if or (eq $environment.name "prod") }}
+    - sign-layer ({{ $runtime.name }}-{{ $runtime.arch }})
+{{ else }}
+    - build-layer ({{ $runtime.name }}-{{ $runtime.arch }})
+    - check-layer-size ({{ $runtime.name }}-{{ $runtime.arch }})
+    - lint python
+    - unit-test ({{ $runtime.name }}-{{ $runtime.arch }})
+    - integration-test ({{ $runtime.name }}-{{ $runtime.arch }})
+{{ end }}
+  dependencies:
+{{ if or (eq $environment.name "prod") }}
+    - sign-layer ({{ $runtime.name }}-{{ $runtime.arch }})
+{{ else }}
+    - build-layer ({{ $runtime.name }}-{{ $runtime.arch }})
+{{ end }}
+  parallel:
+    matrix:
+      - REGION: {{ range (ds "regions").regions }}
+          - {{ .code }}
+        {{- end}}
+  before_script:
+    - EXTERNAL_ID_NAME={{ $environment.external_id }} ROLE_TO_ASSUME={{ $environment.role_to_assume }} AWS_ACCOUNT={{ $environment.account }} source ./ci/get_secrets.sh
+  script:
+    - STAGE={{ $environment.name }} PYTHON_VERSION={{ $runtime.python_version }} ARCH={{ $runtime.arch }} ./ci/publish_layers.sh

+{{- end }}

+{{- end }}

+publish-pypi-package:
+  stage: publish
+  tags: ["arch:amd64"]
+  image: registry.ddbuild.io/images/docker:20.10-py3
+  cache: []
+  rules:
+    - if: '$CI_COMMIT_TAG =~ /^v.*/'
+      when: manual
+  needs: {{ range $runtime := (ds "runtimes").runtimes }}
+    - sign-layer ({{ $runtime.name }}-{{ $runtime.arch }})
+  {{- end }}
+  script:
+    - ./ci/publish_pypi.sh
diff --git a/ci/publish_layers.sh b/ci/publish_layers.sh
new file mode 100755
index 00000000..31a91839
--- /dev/null
+++ b/ci/publish_layers.sh
@@ -0,0 +1,182 @@
+#!/bin/bash
+
+# Unless explicitly stated otherwise all files in this repository are licensed
+# under the Apache License Version 2.0.
+# This product includes software developed at Datadog (https://www.datadoghq.com/).
+# Copyright 2023 Datadog, Inc.
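+
+# Publishes a single layer, selected by PYTHON_VERSION and ARCH, to a single
+# REGION for a given STAGE. Example invocation (assumes AWS credentials have
+# already been exported, e.g. by ci/get_secrets.sh):
+#
+#   STAGE=sandbox PYTHON_VERSION=3.9 ARCH=amd64 REGION=us-east-1 ./ci/publish_layers.sh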
+
+set -e
+
+# Available runtimes: https://docs.aws.amazon.com/lambda/latest/dg/lambda-runtimes.html
+AWS_CLI_PYTHON_VERSIONS=(
+    "python3.8"
+    "python3.8"
+    "python3.9"
+    "python3.9"
+    "python3.10"
+    "python3.10"
+    "python3.11"
+    "python3.11"
+    "python3.12"
+    "python3.12"
+)
+PYTHON_VERSIONS=("3.8-amd64" "3.8-arm64" "3.9-amd64" "3.9-arm64" "3.10-amd64" "3.10-arm64" "3.11-amd64" "3.11-arm64" "3.12-amd64" "3.12-arm64")
+LAYER_PATHS=(
+    ".layers/datadog_lambda_py-amd64-3.8.zip"
+    ".layers/datadog_lambda_py-arm64-3.8.zip"
+    ".layers/datadog_lambda_py-amd64-3.9.zip"
+    ".layers/datadog_lambda_py-arm64-3.9.zip"
+    ".layers/datadog_lambda_py-amd64-3.10.zip"
+    ".layers/datadog_lambda_py-arm64-3.10.zip"
+    ".layers/datadog_lambda_py-amd64-3.11.zip"
+    ".layers/datadog_lambda_py-arm64-3.11.zip"
+    ".layers/datadog_lambda_py-amd64-3.12.zip"
+    ".layers/datadog_lambda_py-arm64-3.12.zip"
+)
+LAYERS=(
+    "Datadog-Python38"
+    "Datadog-Python38-ARM"
+    "Datadog-Python39"
+    "Datadog-Python39-ARM"
+    "Datadog-Python310"
+    "Datadog-Python310-ARM"
+    "Datadog-Python311"
+    "Datadog-Python311-ARM"
+    "Datadog-Python312"
+    "Datadog-Python312-ARM"
+)
+STAGES=('prod' 'sandbox' 'staging')
+
+printf "Starting script...\n\n"
+printf "Installing dependencies\n"
+pip install awscli
+
+publish_layer() {
+    region=$1
+    layer_name=$2
+    compatible_runtimes=$3
+    layer_path=$4
+
+    version_nbr=$(aws lambda publish-layer-version --layer-name $layer_name \
+        --description "Datadog Lambda Layer for Python" \
+        --zip-file "fileb://$layer_path" \
+        --region $region \
+        --compatible-runtimes $compatible_runtimes \
+        | jq -r '.Version')
+
+    permission=$(aws lambda add-layer-version-permission --layer-name $layer_name \
+        --version-number $version_nbr \
+        --statement-id "release-$version_nbr" \
+        --action lambda:GetLayerVersion --principal "*" \
+        --region $region)
+
+    echo $version_nbr
+}
+
+# Target Python version
+if [ -z $PYTHON_VERSION ]; then
+    printf "[Error] PYTHON_VERSION not specified.\n"
+    exit 1
+fi
+
+printf "Python version specified: $PYTHON_VERSION\n"
+if [[ ! ${PYTHON_VERSIONS[@]} =~ $PYTHON_VERSION ]]; then
+    printf "[Error] Unsupported PYTHON_VERSION found.\n"
+    exit 1
+fi
+
+if [ -z $ARCH ]; then
+    printf "[Error] ARCH architecture not specified.\n"
+    exit 1
+fi
+
+index=0
+for i in "${!PYTHON_VERSIONS[@]}"; do
+    if [[ "${PYTHON_VERSIONS[$i]}" = "${PYTHON_VERSION}-${ARCH}" ]]; then
+        index=$i
+    fi
+done
+
+REGIONS=$(aws ec2 describe-regions | jq -r '.[] | .[] | .RegionName')
+
+# Target region
+if [ -z "$REGION" ]; then
+    printf "REGION not specified.\n"
+    exit 1
+fi
+
+printf "Region specified, region is: $REGION\n"
+if [[ ! "$REGIONS" == *"$REGION"* ]]; then
+    printf "[Error] Could not find $REGION in AWS available regions: \n${REGIONS[@]}\n"
+    exit 1
+fi
+
+# Deploy stage
+if [ -z "$STAGE" ]; then
+    printf "[Error] STAGE not specified.\n"
+    printf "Exiting script...\n"
+    exit 1
+fi
+
+printf "Stage specified: $STAGE\n"
+if [[ !
${STAGES[@]} =~ $STAGE ]]; then + printf "[Error] Unsupported STAGE found.\n" + exit 1 +fi + +layer="${LAYERS[$index]}" + +if [[ "$STAGE" =~ ^(staging|sandbox)$ ]]; then + # Deploy latest version + latest_version=$(aws lambda list-layer-versions --region $REGION --layer-name $layer --query 'LayerVersions[0].Version || `0`') + VERSION=$(($latest_version + 1)) +else + # Running on prod + if [ -z "$CI_COMMIT_TAG" ]; then + printf "[Error] No CI_COMMIT_TAG found.\n" + printf "Exiting script...\n" + exit 1 + else + printf "Tag found in environment: $CI_COMMIT_TAG\n" + fi + + VERSION=$(echo "${CI_COMMIT_TAG##*v}" | cut -d. -f2) +fi + +# Target layer version +if [ -z "$VERSION" ]; then + printf "[Error] VERSION for layer version not specified.\n" + printf "Exiting script...\n" + exit 1 +else + printf "Layer version parsed: $VERSION\n" +fi + +printf "[$REGION] Starting publishing layers...\n" +aws_cli_python_version_key="${AWS_CLI_PYTHON_VERSIONS[$index]}" +layer_path="${LAYER_PATHS[$index]}" + +latest_version=$(aws lambda list-layer-versions --region $REGION --layer-name $layer --query 'LayerVersions[0].Version || `0`') +if [ $latest_version -ge $VERSION ]; then + printf "[$REGION] Layer $layer version $VERSION already exists in region $REGION, skipping...\n" + exit 0 +elif [ $latest_version -lt $((VERSION-1)) ]; then + printf "[$REGION][WARNING] The latest version of layer $layer in region $REGION is $latest_version, this will publish all the missing versions including $VERSION\n" +fi + +while [ $latest_version -lt $VERSION ]; do + latest_version=$(publish_layer $REGION $layer $aws_cli_python_version_key $layer_path) + printf "[$REGION] Published version $latest_version for layer $layer in region $REGION\n" + + # This shouldn't happen unless someone manually deleted the latest version, say 28, and + # then tries to republish 28 again. The published version would actually be 29, because + # Lambda layers are immutable and AWS will skip deleted version and use the next number. + if [ $latest_version -gt $VERSION ]; then + printf "[$REGION] Published version $latest_version is greater than the desired version $VERSION!" + exit 1 + fi +done + +printf "[$REGION] Finished publishing layers...\n\n" diff --git a/ci/publish_pypi.sh b/ci/publish_pypi.sh new file mode 100755 index 00000000..d7ec78fd --- /dev/null +++ b/ci/publish_pypi.sh @@ -0,0 +1,23 @@ +#!/bin/bash + +# Unless explicitly stated otherwise all files in this repository are licensed +# under the Apache License Version 2.0. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2023 Datadog, Inc. 
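+
+# Example invocation (assumes the running role can read the
+# ci.datadog-lambda-python.pypi-token SSM parameter in us-east-1):
+#
+#   ./ci/publish_pypi.sh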
+
+set -e
+PYPI_TOKEN=$(aws ssm get-parameter \
+    --region us-east-1 \
+    --name "ci.datadog-lambda-python.pypi-token" \
+    --with-decryption \
+    --query "Parameter.Value" \
+    --out text)
+# Builds the package and uploads it to PyPI
+
+# Clear previously built distributions
+if [ -d "dist" ]; then
+    echo "Removing folder 'dist' to clear previously built distributions"
+    rm -rf dist;
+fi
+
+# Publish to PyPI
+poetry publish --build --username __token__ --password $PYPI_TOKEN
diff --git a/scripts/check_layer_size.sh b/scripts/check_layer_size.sh
index 395f79a4..f1c8c813 100755
--- a/scripts/check_layer_size.sh
+++ b/scripts/check_layer_size.sh
@@ -14,26 +14,22 @@ MAX_LAYER_UNCOMPRESSED_SIZE_KB=$(expr 24 \* 1024)
 
 LAYER_FILES_PREFIX="datadog_lambda_py"
 LAYER_DIR=".layers"
-VERSIONS=("3.8" "3.9" "3.10" "3.11" "3.12")
 
-for version in "${VERSIONS[@]}"
-do
-    FILE=$LAYER_DIR/${LAYER_FILES_PREFIX}-amd64-${version}.zip
-    FILE_SIZE=$(stat --printf="%s" $FILE)
-    FILE_SIZE_KB="$(( ${FILE_SIZE%% *} / 1024))"
-    echo "Layer file ${FILE} has zipped size ${FILE_SIZE_KB} kb"
-    if [ "$FILE_SIZE_KB" -gt "$MAX_LAYER_COMPRESSED_SIZE_KB" ]; then
-        echo "Zipped size exceeded limit $MAX_LAYER_COMPRESSED_SIZE_KB kb"
-        exit 1
-    fi
-    mkdir tmp
-    unzip -q $FILE -d tmp
-    UNZIPPED_FILE_SIZE=$(du -shb tmp/ | cut -f1)
-    UNZIPPED_FILE_SIZE_KB="$(( ${UNZIPPED_FILE_SIZE%% *} / 1024))"
-    rm -rf tmp
-    echo "Layer file ${FILE} has unzipped size ${UNZIPPED_FILE_SIZE_KB} kb"
-    if [ "$UNZIPPED_FILE_SIZE_KB" -gt "$MAX_LAYER_UNCOMPRESSED_SIZE_KB" ]; then
-        echo "Unzipped size exceeded limit $MAX_LAYER_UNCOMPRESSED_SIZE_KB kb"
-        exit 1
-    fi
-done
+FILE=$LAYER_DIR/${LAYER_FILES_PREFIX}-${ARCH}-${PYTHON_VERSION}.zip
+FILE_SIZE=$(stat --printf="%s" $FILE)
+FILE_SIZE_KB="$(( ${FILE_SIZE%% *} / 1024))"
+echo "Layer file ${FILE} has zipped size ${FILE_SIZE_KB} kb"
+if [ "$FILE_SIZE_KB" -gt "$MAX_LAYER_COMPRESSED_SIZE_KB" ]; then
+    echo "Zipped size exceeded limit $MAX_LAYER_COMPRESSED_SIZE_KB kb"
+    exit 1
+fi
+mkdir tmp
+unzip -q $FILE -d tmp
+UNZIPPED_FILE_SIZE=$(du -shb tmp/ | cut -f1)
+UNZIPPED_FILE_SIZE_KB="$(( ${UNZIPPED_FILE_SIZE%% *} / 1024))"
+rm -rf tmp
+echo "Layer file ${FILE} has unzipped size ${UNZIPPED_FILE_SIZE_KB} kb"
+if [ "$UNZIPPED_FILE_SIZE_KB" -gt "$MAX_LAYER_UNCOMPRESSED_SIZE_KB" ]; then
+    echo "Unzipped size exceeded limit $MAX_LAYER_UNCOMPRESSED_SIZE_KB kb"
+    exit 1
+fi
diff --git a/scripts/run_integration_tests.sh b/scripts/run_integration_tests.sh
index 27dd8ec8..13edf1ac 100755
--- a/scripts/run_integration_tests.sh
+++ b/scripts/run_integration_tests.sh
@@ -71,6 +71,12 @@ else
     echo "Not building layers, ensure they've already been built or re-run with 'BUILD_LAYERS=true DD_API_KEY=XXXX ./scripts/run_integration_tests.sh'"
 fi
 
+SERVERLESS_FRAMEWORK_ARCH=""
+if [ "$ARCH" = "amd64" ]; then
+    SERVERLESS_FRAMEWORK_ARCH="x86_64"
+else
+    SERVERLESS_FRAMEWORK_ARCH="arm64"
+fi
 cd $integration_tests_dir
 input_event_files=$(ls ./input_events)
 
@@ -84,13 +90,11 @@ function remove_stack() {
         python_version=$parameters_set[1]
         run_id=$parameters_set[2]
         echo "Removing stack for stage : ${!run_id}"
-        PYTHON_VERSION=${!python_version} RUNTIME=$parameters_set SERVERLESS_RUNTIME=${!serverless_runtime} \
+        PYTHON_VERSION=${!python_version} RUNTIME=$parameters_set SERVERLESS_RUNTIME=${!serverless_runtime} SLS_ARCH=${SERVERLESS_FRAMEWORK_ARCH} \
             serverless remove --stage ${!run_id}
     done
 }
 
-
-
 trap remove_stack EXIT
 for parameters_set in
"${PARAMETERS_SETS[@]}"; do echo "Deploying functions for runtime : $parameters_set, serverless runtime : ${!serverless_runtime}, \ python version : ${!python_version} and run id : ${!run_id}" - PYTHON_VERSION=${!python_version} RUNTIME=$parameters_set SERVERLESS_RUNTIME=${!serverless_runtime} \ + PYTHON_VERSION=${!python_version} RUNTIME=$parameters_set SERVERLESS_RUNTIME=${!serverless_runtime} ARCH=${ARCH} SLS_ARCH=${SERVERLESS_FRAMEWORK_ARCH} \ serverless deploy --stage ${!run_id} echo "Invoking functions for runtime $parameters_set" @@ -114,7 +118,7 @@ python version : ${!python_version} and run id : ${!run_id}" input_event_name=$(echo "$input_event_file" | sed "s/.json//") snapshot_path="./snapshots/return_values/${handler_name}_${input_event_name}.json" - return_value=$(PYTHON_VERSION=${!python_version} RUNTIME=$parameters_set SERVERLESS_RUNTIME=${!serverless_runtime} \ + return_value=$(PYTHON_VERSION=${!python_version} RUNTIME=$parameters_set SERVERLESS_RUNTIME=${!serverless_runtime} SLS_ARCH=${SERVERLESS_FRAMEWORK_ARCH} \ serverless invoke --stage ${!run_id} -f "$function_name" --path "./input_events/$input_event_file") if [ ! -f $snapshot_path ]; then @@ -156,7 +160,7 @@ for handler_name in "${LAMBDA_HANDLERS[@]}"; do # Fetch logs with serverless cli, retrying to avoid AWS account-wide rate limit error retry_counter=0 while [ $retry_counter -lt 10 ]; do - raw_logs=$(PYTHON_VERSION=${!python_version} RUNTIME=$parameters_set SERVERLESS_RUNTIME=${!serverless_runtime} \ + raw_logs=$(PYTHON_VERSION=${!python_version} RUNTIME=$parameters_set SERVERLESS_RUNTIME=${!serverless_runtime} ARCH=${ARCH} SLS_ARCH=${SERVERLESS_FRAMEWORK_ARCH} \ serverless logs --stage ${!run_id} -f $function_name --startTime $script_utc_start_time) fetch_logs_exit_code=$? 
if [ $fetch_logs_exit_code -eq 1 ]; then @@ -238,6 +242,7 @@ for handler_name in "${LAMBDA_HANDLERS[@]}"; do sed -E "s/(tracestate\:)([A-Za-z0-9\-\=\:\;].+)/\1XXX/g" | sed -E "s/(\"_dd.p.tid\"\: \")[a-z0-9\.\-]+/\1XXXX/g" | sed -E "s/(_dd.p.tid=)[a-z0-9\.\-]+/\1XXXX/g" | + sed -E 's/arch (aarch64|x86_64)/arch XXXX/g' | # Parse out account ID in ARN sed -E "s/([a-zA-Z0-9]+):([a-zA-Z0-9]+):([a-zA-Z0-9]+):([a-zA-Z0-9\-]+):([a-zA-Z0-9\-\:]+)/\1:\2:\3:\4:XXXX:\4/g" | sed -E "/init complete at epoch/d" | diff --git a/tests/integration/serverless.yml b/tests/integration/serverless.yml index bb64c397..fd174c5f 100644 --- a/tests/integration/serverless.yml +++ b/tests/integration/serverless.yml @@ -6,6 +6,7 @@ provider: region: eu-west-1 tracing: lambda: "PassThrough" + architecture: ${env:SLS_ARCH} environment: DD_INTEGRATION_TEST: true DD_TRACE_ENABLED: true @@ -23,7 +24,7 @@ provider: layers: python: package: - artifact: ../../.layers/datadog_lambda_py-amd64-${env:PYTHON_VERSION}.zip + artifact: ../../.layers/datadog_lambda_py-${env:ARCH}-${env:PYTHON_VERSION}.zip functions: # async-metrics (flushed to logs) diff --git a/tests/integration/snapshots/logs/sync-metrics_python310.log b/tests/integration/snapshots/logs/sync-metrics_python310.log index ac833bf0..e97b0cb2 100644 --- a/tests/integration/snapshots/logs/sync-metrics_python310.log +++ b/tests/integration/snapshots/logs/sync-metrics_python310.log @@ -149,7 +149,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate", " ] ] } -HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept-Encoding:gzip, deflate", "Accept:*/*", "Connection:keep-alive", "Content-Encoding:deflate", "Content-Length:XXXX", "Content-Type:application/json", "DD-API-KEY:XXXX", "User-Agent:datadogpy/XX (python XX; os linux; arch x86_64)", "traceparent:XXX", "tracestate:XXX +HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept-Encoding:gzip, deflate", "Accept:*/*", "Connection:keep-alive", "Content-Encoding:deflate", "Content-Length:XXXX", "Content-Type:application/json", "DD-API-KEY:XXXX", "User-Agent:datadogpy/XX (python XX; os linux; arch XXXX)", "traceparent:XXX", "tracestate:XXX { "traces": [ [ @@ -172,7 +172,7 @@ HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept "http.url": "https://api.datadoghq.com/api/v1/distribution_points", "out.host": "api.datadoghq.com", "http.status_code": "202", - "http.useragent": "datadogpy/XX (python XX; os linux; arch x86_64)", + "http.useragent": "datadogpy/XX (python XX; os linux; arch XXXX)", "_dd.p.dm": "-0", "_dd.p.tid": "XXXX", "language": "python" @@ -334,7 +334,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate", " ] ] } -HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept-Encoding:gzip, deflate", "Accept:*/*", "Connection:keep-alive", "Content-Encoding:deflate", "Content-Length:XXXX", "Content-Type:application/json", "DD-API-KEY:XXXX", "User-Agent:datadogpy/XX (python XX; os linux; arch x86_64)", "traceparent:XXX", "tracestate:XXX +HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept-Encoding:gzip, deflate", "Accept:*/*", "Connection:keep-alive", "Content-Encoding:deflate", "Content-Length:XXXX", "Content-Type:application/json", "DD-API-KEY:XXXX", "User-Agent:datadogpy/XX (python XX; os linux; arch XXXX)", "traceparent:XXX", "tracestate:XXX { "traces": [ [ @@ -357,7 +357,7 @@ HTTP POST 
https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept "http.url": "https://api.datadoghq.com/api/v1/distribution_points", "out.host": "api.datadoghq.com", "http.status_code": "202", - "http.useragent": "datadogpy/XX (python XX; os linux; arch x86_64)", + "http.useragent": "datadogpy/XX (python XX; os linux; arch XXXX)", "_dd.p.dm": "-0", "_dd.p.tid": "XXXX", "language": "python" @@ -512,7 +512,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate", " ] ] } -HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept-Encoding:gzip, deflate", "Accept:*/*", "Connection:keep-alive", "Content-Encoding:deflate", "Content-Length:XXXX", "Content-Type:application/json", "DD-API-KEY:XXXX", "User-Agent:datadogpy/XX (python XX; os linux; arch x86_64)", "traceparent:XXX", "tracestate:XXX +HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept-Encoding:gzip, deflate", "Accept:*/*", "Connection:keep-alive", "Content-Encoding:deflate", "Content-Length:XXXX", "Content-Type:application/json", "DD-API-KEY:XXXX", "User-Agent:datadogpy/XX (python XX; os linux; arch XXXX)", "traceparent:XXX", "tracestate:XXX { "traces": [ [ @@ -535,7 +535,7 @@ HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept "http.url": "https://api.datadoghq.com/api/v1/distribution_points", "out.host": "api.datadoghq.com", "http.status_code": "202", - "http.useragent": "datadogpy/XX (python XX; os linux; arch x86_64)", + "http.useragent": "datadogpy/XX (python XX; os linux; arch XXXX)", "_dd.p.dm": "-0", "_dd.p.tid": "XXXX", "language": "python" @@ -705,7 +705,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate", " ] ] } -HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept-Encoding:gzip, deflate", "Accept:*/*", "Connection:keep-alive", "Content-Encoding:deflate", "Content-Length:XXXX", "Content-Type:application/json", "DD-API-KEY:XXXX", "User-Agent:datadogpy/XX (python XX; os linux; arch x86_64)", "traceparent:XXX", "tracestate:XXX +HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept-Encoding:gzip, deflate", "Accept:*/*", "Connection:keep-alive", "Content-Encoding:deflate", "Content-Length:XXXX", "Content-Type:application/json", "DD-API-KEY:XXXX", "User-Agent:datadogpy/XX (python XX; os linux; arch XXXX)", "traceparent:XXX", "tracestate:XXX { "traces": [ [ @@ -728,7 +728,7 @@ HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept "http.url": "https://api.datadoghq.com/api/v1/distribution_points", "out.host": "api.datadoghq.com", "http.status_code": "202", - "http.useragent": "datadogpy/XX (python XX; os linux; arch x86_64)", + "http.useragent": "datadogpy/XX (python XX; os linux; arch XXXX)", "_dd.p.dm": "-0", "_dd.p.tid": "XXXX", "language": "python" @@ -890,7 +890,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate", " ] ] } -HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept-Encoding:gzip, deflate", "Accept:*/*", "Connection:keep-alive", "Content-Encoding:deflate", "Content-Length:XXXX", "Content-Type:application/json", "DD-API-KEY:XXXX", "User-Agent:datadogpy/XX (python XX; os linux; arch x86_64)", "traceparent:XXX", "tracestate:XXX +HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept-Encoding:gzip, deflate", "Accept:*/*", "Connection:keep-alive", "Content-Encoding:deflate", "Content-Length:XXXX", 
"Content-Type:application/json", "DD-API-KEY:XXXX", "User-Agent:datadogpy/XX (python XX; os linux; arch XXXX)", "traceparent:XXX", "tracestate:XXX { "traces": [ [ @@ -913,7 +913,7 @@ HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept "http.url": "https://api.datadoghq.com/api/v1/distribution_points", "out.host": "api.datadoghq.com", "http.status_code": "202", - "http.useragent": "datadogpy/XX (python XX; os linux; arch x86_64)", + "http.useragent": "datadogpy/XX (python XX; os linux; arch XXXX)", "_dd.p.dm": "-0", "_dd.p.tid": "XXXX", "language": "python" @@ -1072,7 +1072,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate", " ] ] } -HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept-Encoding:gzip, deflate", "Accept:*/*", "Connection:keep-alive", "Content-Encoding:deflate", "Content-Length:XXXX", "Content-Type:application/json", "DD-API-KEY:XXXX", "User-Agent:datadogpy/XX (python XX; os linux; arch x86_64)", "traceparent:XXX", "tracestate:XXX +HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept-Encoding:gzip, deflate", "Accept:*/*", "Connection:keep-alive", "Content-Encoding:deflate", "Content-Length:XXXX", "Content-Type:application/json", "DD-API-KEY:XXXX", "User-Agent:datadogpy/XX (python XX; os linux; arch XXXX)", "traceparent:XXX", "tracestate:XXX { "traces": [ [ @@ -1095,7 +1095,7 @@ HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept "http.url": "https://api.datadoghq.com/api/v1/distribution_points", "out.host": "api.datadoghq.com", "http.status_code": "202", - "http.useragent": "datadogpy/XX (python XX; os linux; arch x86_64)", + "http.useragent": "datadogpy/XX (python XX; os linux; arch XXXX)", "_dd.p.dm": "-0", "_dd.p.tid": "XXXX", "language": "python" @@ -1255,7 +1255,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate", " ] ] } -HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept-Encoding:gzip, deflate", "Accept:*/*", "Connection:keep-alive", "Content-Encoding:deflate", "Content-Length:XXXX", "Content-Type:application/json", "DD-API-KEY:XXXX", "User-Agent:datadogpy/XX (python XX; os linux; arch x86_64)", "traceparent:XXX", "tracestate:XXX +HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept-Encoding:gzip, deflate", "Accept:*/*", "Connection:keep-alive", "Content-Encoding:deflate", "Content-Length:XXXX", "Content-Type:application/json", "DD-API-KEY:XXXX", "User-Agent:datadogpy/XX (python XX; os linux; arch XXXX)", "traceparent:XXX", "tracestate:XXX { "traces": [ [ @@ -1278,7 +1278,7 @@ HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept "http.url": "https://api.datadoghq.com/api/v1/distribution_points", "out.host": "api.datadoghq.com", "http.status_code": "202", - "http.useragent": "datadogpy/XX (python XX; os linux; arch x86_64)", + "http.useragent": "datadogpy/XX (python XX; os linux; arch XXXX)", "_dd.p.dm": "-0", "_dd.p.tid": "XXXX", "language": "python" @@ -1437,7 +1437,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate", " ] ] } -HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept-Encoding:gzip, deflate", "Accept:*/*", "Connection:keep-alive", "Content-Encoding:deflate", "Content-Length:XXXX", "Content-Type:application/json", "DD-API-KEY:XXXX", "User-Agent:datadogpy/XX (python XX; os linux; arch x86_64)", "traceparent:XXX", "tracestate:XXX +HTTP POST 
https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept-Encoding:gzip, deflate", "Accept:*/*", "Connection:keep-alive", "Content-Encoding:deflate", "Content-Length:XXXX", "Content-Type:application/json", "DD-API-KEY:XXXX", "User-Agent:datadogpy/XX (python XX; os linux; arch XXXX)", "traceparent:XXX", "tracestate:XXX { "traces": [ [ @@ -1460,7 +1460,7 @@ HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept "http.url": "https://api.datadoghq.com/api/v1/distribution_points", "out.host": "api.datadoghq.com", "http.status_code": "202", - "http.useragent": "datadogpy/XX (python XX; os linux; arch x86_64)", + "http.useragent": "datadogpy/XX (python XX; os linux; arch XXXX)", "_dd.p.dm": "-0", "_dd.p.tid": "XXXX", "language": "python" @@ -1627,7 +1627,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate", " ] ] } -HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept-Encoding:gzip, deflate", "Accept:*/*", "Connection:keep-alive", "Content-Encoding:deflate", "Content-Length:XXXX", "Content-Type:application/json", "DD-API-KEY:XXXX", "User-Agent:datadogpy/XX (python XX; os linux; arch x86_64)", "traceparent:XXX", "tracestate:XXX +HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept-Encoding:gzip, deflate", "Accept:*/*", "Connection:keep-alive", "Content-Encoding:deflate", "Content-Length:XXXX", "Content-Type:application/json", "DD-API-KEY:XXXX", "User-Agent:datadogpy/XX (python XX; os linux; arch XXXX)", "traceparent:XXX", "tracestate:XXX { "traces": [ [ @@ -1650,7 +1650,7 @@ HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept "http.url": "https://api.datadoghq.com/api/v1/distribution_points", "out.host": "api.datadoghq.com", "http.status_code": "202", - "http.useragent": "datadogpy/XX (python XX; os linux; arch x86_64)", + "http.useragent": "datadogpy/XX (python XX; os linux; arch XXXX)", "_dd.p.dm": "-0", "_dd.p.tid": "XXXX", "language": "python" diff --git a/tests/integration/snapshots/logs/sync-metrics_python311.log b/tests/integration/snapshots/logs/sync-metrics_python311.log index 0e252e0f..84161ca2 100644 --- a/tests/integration/snapshots/logs/sync-metrics_python311.log +++ b/tests/integration/snapshots/logs/sync-metrics_python311.log @@ -149,7 +149,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate", " ] ] } -HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept-Encoding:gzip, deflate", "Accept:*/*", "Connection:keep-alive", "Content-Encoding:deflate", "Content-Length:XXXX", "Content-Type:application/json", "DD-API-KEY:XXXX", "User-Agent:datadogpy/XX (python XX; os linux; arch x86_64)", "traceparent:XXX", "tracestate:XXX +HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept-Encoding:gzip, deflate", "Accept:*/*", "Connection:keep-alive", "Content-Encoding:deflate", "Content-Length:XXXX", "Content-Type:application/json", "DD-API-KEY:XXXX", "User-Agent:datadogpy/XX (python XX; os linux; arch XXXX)", "traceparent:XXX", "tracestate:XXX { "traces": [ [ @@ -172,7 +172,7 @@ HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept "http.url": "https://api.datadoghq.com/api/v1/distribution_points", "out.host": "api.datadoghq.com", "http.status_code": "202", - "http.useragent": "datadogpy/XX (python XX; os linux; arch x86_64)", + "http.useragent": "datadogpy/XX (python XX; os linux; arch XXXX)", "_dd.p.dm": "-0", "_dd.p.tid": "XXXX", 
"language": "python" @@ -334,7 +334,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate", " ] ] } -HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept-Encoding:gzip, deflate", "Accept:*/*", "Connection:keep-alive", "Content-Encoding:deflate", "Content-Length:XXXX", "Content-Type:application/json", "DD-API-KEY:XXXX", "User-Agent:datadogpy/XX (python XX; os linux; arch x86_64)", "traceparent:XXX", "tracestate:XXX +HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept-Encoding:gzip, deflate", "Accept:*/*", "Connection:keep-alive", "Content-Encoding:deflate", "Content-Length:XXXX", "Content-Type:application/json", "DD-API-KEY:XXXX", "User-Agent:datadogpy/XX (python XX; os linux; arch XXXX)", "traceparent:XXX", "tracestate:XXX { "traces": [ [ @@ -357,7 +357,7 @@ HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept "http.url": "https://api.datadoghq.com/api/v1/distribution_points", "out.host": "api.datadoghq.com", "http.status_code": "202", - "http.useragent": "datadogpy/XX (python XX; os linux; arch x86_64)", + "http.useragent": "datadogpy/XX (python XX; os linux; arch XXXX)", "_dd.p.dm": "-0", "_dd.p.tid": "XXXX", "language": "python" @@ -512,7 +512,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate", " ] ] } -HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept-Encoding:gzip, deflate", "Accept:*/*", "Connection:keep-alive", "Content-Encoding:deflate", "Content-Length:XXXX", "Content-Type:application/json", "DD-API-KEY:XXXX", "User-Agent:datadogpy/XX (python XX; os linux; arch x86_64)", "traceparent:XXX", "tracestate:XXX +HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept-Encoding:gzip, deflate", "Accept:*/*", "Connection:keep-alive", "Content-Encoding:deflate", "Content-Length:XXXX", "Content-Type:application/json", "DD-API-KEY:XXXX", "User-Agent:datadogpy/XX (python XX; os linux; arch XXXX)", "traceparent:XXX", "tracestate:XXX { "traces": [ [ @@ -535,7 +535,7 @@ HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept "http.url": "https://api.datadoghq.com/api/v1/distribution_points", "out.host": "api.datadoghq.com", "http.status_code": "202", - "http.useragent": "datadogpy/XX (python XX; os linux; arch x86_64)", + "http.useragent": "datadogpy/XX (python XX; os linux; arch XXXX)", "_dd.p.dm": "-0", "_dd.p.tid": "XXXX", "language": "python" @@ -705,7 +705,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate", " ] ] } -HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept-Encoding:gzip, deflate", "Accept:*/*", "Connection:keep-alive", "Content-Encoding:deflate", "Content-Length:XXXX", "Content-Type:application/json", "DD-API-KEY:XXXX", "User-Agent:datadogpy/XX (python XX; os linux; arch x86_64)", "traceparent:XXX", "tracestate:XXX +HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept-Encoding:gzip, deflate", "Accept:*/*", "Connection:keep-alive", "Content-Encoding:deflate", "Content-Length:XXXX", "Content-Type:application/json", "DD-API-KEY:XXXX", "User-Agent:datadogpy/XX (python XX; os linux; arch XXXX)", "traceparent:XXX", "tracestate:XXX { "traces": [ [ @@ -728,7 +728,7 @@ HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept "http.url": "https://api.datadoghq.com/api/v1/distribution_points", "out.host": "api.datadoghq.com", "http.status_code": "202", - 
"http.useragent": "datadogpy/XX (python XX; os linux; arch x86_64)", + "http.useragent": "datadogpy/XX (python XX; os linux; arch XXXX)", "_dd.p.dm": "-0", "_dd.p.tid": "XXXX", "language": "python" @@ -890,7 +890,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate", " ] ] } -HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept-Encoding:gzip, deflate", "Accept:*/*", "Connection:keep-alive", "Content-Encoding:deflate", "Content-Length:XXXX", "Content-Type:application/json", "DD-API-KEY:XXXX", "User-Agent:datadogpy/XX (python XX; os linux; arch x86_64)", "traceparent:XXX", "tracestate:XXX +HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept-Encoding:gzip, deflate", "Accept:*/*", "Connection:keep-alive", "Content-Encoding:deflate", "Content-Length:XXXX", "Content-Type:application/json", "DD-API-KEY:XXXX", "User-Agent:datadogpy/XX (python XX; os linux; arch XXXX)", "traceparent:XXX", "tracestate:XXX { "traces": [ [ @@ -913,7 +913,7 @@ HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept "http.url": "https://api.datadoghq.com/api/v1/distribution_points", "out.host": "api.datadoghq.com", "http.status_code": "202", - "http.useragent": "datadogpy/XX (python XX; os linux; arch x86_64)", + "http.useragent": "datadogpy/XX (python XX; os linux; arch XXXX)", "_dd.p.dm": "-0", "_dd.p.tid": "XXXX", "language": "python" @@ -1072,7 +1072,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate", " ] ] } -HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept-Encoding:gzip, deflate", "Accept:*/*", "Connection:keep-alive", "Content-Encoding:deflate", "Content-Length:XXXX", "Content-Type:application/json", "DD-API-KEY:XXXX", "User-Agent:datadogpy/XX (python XX; os linux; arch x86_64)", "traceparent:XXX", "tracestate:XXX +HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept-Encoding:gzip, deflate", "Accept:*/*", "Connection:keep-alive", "Content-Encoding:deflate", "Content-Length:XXXX", "Content-Type:application/json", "DD-API-KEY:XXXX", "User-Agent:datadogpy/XX (python XX; os linux; arch XXXX)", "traceparent:XXX", "tracestate:XXX { "traces": [ [ @@ -1095,7 +1095,7 @@ HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept "http.url": "https://api.datadoghq.com/api/v1/distribution_points", "out.host": "api.datadoghq.com", "http.status_code": "202", - "http.useragent": "datadogpy/XX (python XX; os linux; arch x86_64)", + "http.useragent": "datadogpy/XX (python XX; os linux; arch XXXX)", "_dd.p.dm": "-0", "_dd.p.tid": "XXXX", "language": "python" @@ -1255,7 +1255,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate", " ] ] } -HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept-Encoding:gzip, deflate", "Accept:*/*", "Connection:keep-alive", "Content-Encoding:deflate", "Content-Length:XXXX", "Content-Type:application/json", "DD-API-KEY:XXXX", "User-Agent:datadogpy/XX (python XX; os linux; arch x86_64)", "traceparent:XXX", "tracestate:XXX +HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept-Encoding:gzip, deflate", "Accept:*/*", "Connection:keep-alive", "Content-Encoding:deflate", "Content-Length:XXXX", "Content-Type:application/json", "DD-API-KEY:XXXX", "User-Agent:datadogpy/XX (python XX; os linux; arch XXXX)", "traceparent:XXX", "tracestate:XXX { "traces": [ [ @@ -1278,7 +1278,7 @@ HTTP POST 
https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept "http.url": "https://api.datadoghq.com/api/v1/distribution_points", "out.host": "api.datadoghq.com", "http.status_code": "202", - "http.useragent": "datadogpy/XX (python XX; os linux; arch x86_64)", + "http.useragent": "datadogpy/XX (python XX; os linux; arch XXXX)", "_dd.p.dm": "-0", "_dd.p.tid": "XXXX", "language": "python" @@ -1437,7 +1437,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate", " ] ] } -HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept-Encoding:gzip, deflate", "Accept:*/*", "Connection:keep-alive", "Content-Encoding:deflate", "Content-Length:XXXX", "Content-Type:application/json", "DD-API-KEY:XXXX", "User-Agent:datadogpy/XX (python XX; os linux; arch x86_64)", "traceparent:XXX", "tracestate:XXX +HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept-Encoding:gzip, deflate", "Accept:*/*", "Connection:keep-alive", "Content-Encoding:deflate", "Content-Length:XXXX", "Content-Type:application/json", "DD-API-KEY:XXXX", "User-Agent:datadogpy/XX (python XX; os linux; arch XXXX)", "traceparent:XXX", "tracestate:XXX { "traces": [ [ @@ -1460,7 +1460,7 @@ HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept "http.url": "https://api.datadoghq.com/api/v1/distribution_points", "out.host": "api.datadoghq.com", "http.status_code": "202", - "http.useragent": "datadogpy/XX (python XX; os linux; arch x86_64)", + "http.useragent": "datadogpy/XX (python XX; os linux; arch XXXX)", "_dd.p.dm": "-0", "_dd.p.tid": "XXXX", "language": "python" @@ -1627,7 +1627,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate", " ] ] } -HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept-Encoding:gzip, deflate", "Accept:*/*", "Connection:keep-alive", "Content-Encoding:deflate", "Content-Length:XXXX", "Content-Type:application/json", "DD-API-KEY:XXXX", "User-Agent:datadogpy/XX (python XX; os linux; arch x86_64)", "traceparent:XXX", "tracestate:XXX +HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept-Encoding:gzip, deflate", "Accept:*/*", "Connection:keep-alive", "Content-Encoding:deflate", "Content-Length:XXXX", "Content-Type:application/json", "DD-API-KEY:XXXX", "User-Agent:datadogpy/XX (python XX; os linux; arch XXXX)", "traceparent:XXX", "tracestate:XXX { "traces": [ [ @@ -1650,7 +1650,7 @@ HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept "http.url": "https://api.datadoghq.com/api/v1/distribution_points", "out.host": "api.datadoghq.com", "http.status_code": "202", - "http.useragent": "datadogpy/XX (python XX; os linux; arch x86_64)", + "http.useragent": "datadogpy/XX (python XX; os linux; arch XXXX)", "_dd.p.dm": "-0", "_dd.p.tid": "XXXX", "language": "python" diff --git a/tests/integration/snapshots/logs/sync-metrics_python312.log b/tests/integration/snapshots/logs/sync-metrics_python312.log index 74ebff99..8b4c74cf 100644 --- a/tests/integration/snapshots/logs/sync-metrics_python312.log +++ b/tests/integration/snapshots/logs/sync-metrics_python312.log @@ -149,7 +149,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate", " ] ] } -HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept-Encoding:gzip, deflate", "Accept:*/*", "Connection:keep-alive", "Content-Encoding:deflate", "Content-Length:XXXX", "Content-Type:application/json", "DD-API-KEY:XXXX", 
"User-Agent:datadogpy/XX (python XX; os linux; arch x86_64)", "traceparent:XXX", "tracestate:XXX +HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept-Encoding:gzip, deflate", "Accept:*/*", "Connection:keep-alive", "Content-Encoding:deflate", "Content-Length:XXXX", "Content-Type:application/json", "DD-API-KEY:XXXX", "User-Agent:datadogpy/XX (python XX; os linux; arch XXXX)", "traceparent:XXX", "tracestate:XXX { "traces": [ [ @@ -172,7 +172,7 @@ HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept "http.url": "https://api.datadoghq.com/api/v1/distribution_points", "out.host": "api.datadoghq.com", "http.status_code": "202", - "http.useragent": "datadogpy/XX (python XX; os linux; arch x86_64)", + "http.useragent": "datadogpy/XX (python XX; os linux; arch XXXX)", "_dd.p.dm": "-0", "_dd.p.tid": "XXXX", "language": "python" @@ -334,7 +334,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate", " ] ] } -HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept-Encoding:gzip, deflate", "Accept:*/*", "Connection:keep-alive", "Content-Encoding:deflate", "Content-Length:XXXX", "Content-Type:application/json", "DD-API-KEY:XXXX", "User-Agent:datadogpy/XX (python XX; os linux; arch x86_64)", "traceparent:XXX", "tracestate:XXX +HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept-Encoding:gzip, deflate", "Accept:*/*", "Connection:keep-alive", "Content-Encoding:deflate", "Content-Length:XXXX", "Content-Type:application/json", "DD-API-KEY:XXXX", "User-Agent:datadogpy/XX (python XX; os linux; arch XXXX)", "traceparent:XXX", "tracestate:XXX { "traces": [ [ @@ -357,7 +357,7 @@ HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept "http.url": "https://api.datadoghq.com/api/v1/distribution_points", "out.host": "api.datadoghq.com", "http.status_code": "202", - "http.useragent": "datadogpy/XX (python XX; os linux; arch x86_64)", + "http.useragent": "datadogpy/XX (python XX; os linux; arch XXXX)", "_dd.p.dm": "-0", "_dd.p.tid": "XXXX", "language": "python" @@ -512,7 +512,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate", " ] ] } -HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept-Encoding:gzip, deflate", "Accept:*/*", "Connection:keep-alive", "Content-Encoding:deflate", "Content-Length:XXXX", "Content-Type:application/json", "DD-API-KEY:XXXX", "User-Agent:datadogpy/XX (python XX; os linux; arch x86_64)", "traceparent:XXX", "tracestate:XXX +HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept-Encoding:gzip, deflate", "Accept:*/*", "Connection:keep-alive", "Content-Encoding:deflate", "Content-Length:XXXX", "Content-Type:application/json", "DD-API-KEY:XXXX", "User-Agent:datadogpy/XX (python XX; os linux; arch XXXX)", "traceparent:XXX", "tracestate:XXX { "traces": [ [ @@ -535,7 +535,7 @@ HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept "http.url": "https://api.datadoghq.com/api/v1/distribution_points", "out.host": "api.datadoghq.com", "http.status_code": "202", - "http.useragent": "datadogpy/XX (python XX; os linux; arch x86_64)", + "http.useragent": "datadogpy/XX (python XX; os linux; arch XXXX)", "_dd.p.dm": "-0", "_dd.p.tid": "XXXX", "language": "python" @@ -705,7 +705,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate", " ] ] } -HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: 
["Accept-Encoding:gzip, deflate", "Accept:*/*", "Connection:keep-alive", "Content-Encoding:deflate", "Content-Length:XXXX", "Content-Type:application/json", "DD-API-KEY:XXXX", "User-Agent:datadogpy/XX (python XX; os linux; arch x86_64)", "traceparent:XXX", "tracestate:XXX +HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept-Encoding:gzip, deflate", "Accept:*/*", "Connection:keep-alive", "Content-Encoding:deflate", "Content-Length:XXXX", "Content-Type:application/json", "DD-API-KEY:XXXX", "User-Agent:datadogpy/XX (python XX; os linux; arch XXXX)", "traceparent:XXX", "tracestate:XXX { "traces": [ [ @@ -728,7 +728,7 @@ HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept "http.url": "https://api.datadoghq.com/api/v1/distribution_points", "out.host": "api.datadoghq.com", "http.status_code": "202", - "http.useragent": "datadogpy/XX (python XX; os linux; arch x86_64)", + "http.useragent": "datadogpy/XX (python XX; os linux; arch XXXX)", "_dd.p.dm": "-0", "_dd.p.tid": "XXXX", "language": "python" @@ -890,7 +890,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate", " ] ] } -HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept-Encoding:gzip, deflate", "Accept:*/*", "Connection:keep-alive", "Content-Encoding:deflate", "Content-Length:XXXX", "Content-Type:application/json", "DD-API-KEY:XXXX", "User-Agent:datadogpy/XX (python XX; os linux; arch x86_64)", "traceparent:XXX", "tracestate:XXX +HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept-Encoding:gzip, deflate", "Accept:*/*", "Connection:keep-alive", "Content-Encoding:deflate", "Content-Length:XXXX", "Content-Type:application/json", "DD-API-KEY:XXXX", "User-Agent:datadogpy/XX (python XX; os linux; arch XXXX)", "traceparent:XXX", "tracestate:XXX { "traces": [ [ @@ -913,7 +913,7 @@ HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept "http.url": "https://api.datadoghq.com/api/v1/distribution_points", "out.host": "api.datadoghq.com", "http.status_code": "202", - "http.useragent": "datadogpy/XX (python XX; os linux; arch x86_64)", + "http.useragent": "datadogpy/XX (python XX; os linux; arch XXXX)", "_dd.p.dm": "-0", "_dd.p.tid": "XXXX", "language": "python" @@ -1072,7 +1072,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate", " ] ] } -HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept-Encoding:gzip, deflate", "Accept:*/*", "Connection:keep-alive", "Content-Encoding:deflate", "Content-Length:XXXX", "Content-Type:application/json", "DD-API-KEY:XXXX", "User-Agent:datadogpy/XX (python XX; os linux; arch x86_64)", "traceparent:XXX", "tracestate:XXX +HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept-Encoding:gzip, deflate", "Accept:*/*", "Connection:keep-alive", "Content-Encoding:deflate", "Content-Length:XXXX", "Content-Type:application/json", "DD-API-KEY:XXXX", "User-Agent:datadogpy/XX (python XX; os linux; arch XXXX)", "traceparent:XXX", "tracestate:XXX { "traces": [ [ @@ -1095,7 +1095,7 @@ HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept "http.url": "https://api.datadoghq.com/api/v1/distribution_points", "out.host": "api.datadoghq.com", "http.status_code": "202", - "http.useragent": "datadogpy/XX (python XX; os linux; arch x86_64)", + "http.useragent": "datadogpy/XX (python XX; os linux; arch XXXX)", "_dd.p.dm": "-0", "_dd.p.tid": "XXXX", "language": "python" @@ 
-1255,7 +1255,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate", " ] ] } -HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept-Encoding:gzip, deflate", "Accept:*/*", "Connection:keep-alive", "Content-Encoding:deflate", "Content-Length:XXXX", "Content-Type:application/json", "DD-API-KEY:XXXX", "User-Agent:datadogpy/XX (python XX; os linux; arch x86_64)", "traceparent:XXX", "tracestate:XXX +HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept-Encoding:gzip, deflate", "Accept:*/*", "Connection:keep-alive", "Content-Encoding:deflate", "Content-Length:XXXX", "Content-Type:application/json", "DD-API-KEY:XXXX", "User-Agent:datadogpy/XX (python XX; os linux; arch XXXX)", "traceparent:XXX", "tracestate:XXX { "traces": [ [ @@ -1278,7 +1278,7 @@ HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept "http.url": "https://api.datadoghq.com/api/v1/distribution_points", "out.host": "api.datadoghq.com", "http.status_code": "202", - "http.useragent": "datadogpy/XX (python XX; os linux; arch x86_64)", + "http.useragent": "datadogpy/XX (python XX; os linux; arch XXXX)", "_dd.p.dm": "-0", "_dd.p.tid": "XXXX", "language": "python" @@ -1437,7 +1437,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate", " ] ] } -HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept-Encoding:gzip, deflate", "Accept:*/*", "Connection:keep-alive", "Content-Encoding:deflate", "Content-Length:XXXX", "Content-Type:application/json", "DD-API-KEY:XXXX", "User-Agent:datadogpy/XX (python XX; os linux; arch x86_64)", "traceparent:XXX", "tracestate:XXX +HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept-Encoding:gzip, deflate", "Accept:*/*", "Connection:keep-alive", "Content-Encoding:deflate", "Content-Length:XXXX", "Content-Type:application/json", "DD-API-KEY:XXXX", "User-Agent:datadogpy/XX (python XX; os linux; arch XXXX)", "traceparent:XXX", "tracestate:XXX { "traces": [ [ @@ -1460,7 +1460,7 @@ HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept "http.url": "https://api.datadoghq.com/api/v1/distribution_points", "out.host": "api.datadoghq.com", "http.status_code": "202", - "http.useragent": "datadogpy/XX (python XX; os linux; arch x86_64)", + "http.useragent": "datadogpy/XX (python XX; os linux; arch XXXX)", "_dd.p.dm": "-0", "_dd.p.tid": "XXXX", "language": "python" @@ -1627,7 +1627,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate", " ] ] } -HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept-Encoding:gzip, deflate", "Accept:*/*", "Connection:keep-alive", "Content-Encoding:deflate", "Content-Length:XXXX", "Content-Type:application/json", "DD-API-KEY:XXXX", "User-Agent:datadogpy/XX (python XX; os linux; arch x86_64)", "traceparent:XXX", "tracestate:XXX +HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept-Encoding:gzip, deflate", "Accept:*/*", "Connection:keep-alive", "Content-Encoding:deflate", "Content-Length:XXXX", "Content-Type:application/json", "DD-API-KEY:XXXX", "User-Agent:datadogpy/XX (python XX; os linux; arch XXXX)", "traceparent:XXX", "tracestate:XXX { "traces": [ [ @@ -1650,7 +1650,7 @@ HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept "http.url": "https://api.datadoghq.com/api/v1/distribution_points", "out.host": "api.datadoghq.com", "http.status_code": "202", - "http.useragent": 
"datadogpy/XX (python XX; os linux; arch x86_64)", + "http.useragent": "datadogpy/XX (python XX; os linux; arch XXXX)", "_dd.p.dm": "-0", "_dd.p.tid": "XXXX", "language": "python" diff --git a/tests/integration/snapshots/logs/sync-metrics_python38.log b/tests/integration/snapshots/logs/sync-metrics_python38.log index bd52bc1e..4fe60af6 100644 --- a/tests/integration/snapshots/logs/sync-metrics_python38.log +++ b/tests/integration/snapshots/logs/sync-metrics_python38.log @@ -149,7 +149,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate", " ] ] } -HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept-Encoding:gzip, deflate", "Accept:*/*", "Connection:keep-alive", "Content-Encoding:deflate", "Content-Length:XXXX", "Content-Type:application/json", "DD-API-KEY:XXXX", "User-Agent:datadogpy/XX (python XX; os linux; arch x86_64)", "traceparent:XXX", "tracestate:XXX +HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept-Encoding:gzip, deflate", "Accept:*/*", "Connection:keep-alive", "Content-Encoding:deflate", "Content-Length:XXXX", "Content-Type:application/json", "DD-API-KEY:XXXX", "User-Agent:datadogpy/XX (python XX; os linux; arch XXXX)", "traceparent:XXX", "tracestate:XXX { "traces": [ [ @@ -172,7 +172,7 @@ HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept "http.url": "https://api.datadoghq.com/api/v1/distribution_points", "out.host": "api.datadoghq.com", "http.status_code": "202", - "http.useragent": "datadogpy/XX (python XX; os linux; arch x86_64)", + "http.useragent": "datadogpy/XX (python XX; os linux; arch XXXX)", "_dd.p.dm": "-0", "_dd.p.tid": "XXXX", "language": "python" @@ -334,7 +334,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate", " ] ] } -HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept-Encoding:gzip, deflate", "Accept:*/*", "Connection:keep-alive", "Content-Encoding:deflate", "Content-Length:XXXX", "Content-Type:application/json", "DD-API-KEY:XXXX", "User-Agent:datadogpy/XX (python XX; os linux; arch x86_64)", "traceparent:XXX", "tracestate:XXX +HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept-Encoding:gzip, deflate", "Accept:*/*", "Connection:keep-alive", "Content-Encoding:deflate", "Content-Length:XXXX", "Content-Type:application/json", "DD-API-KEY:XXXX", "User-Agent:datadogpy/XX (python XX; os linux; arch XXXX)", "traceparent:XXX", "tracestate:XXX { "traces": [ [ @@ -357,7 +357,7 @@ HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept "http.url": "https://api.datadoghq.com/api/v1/distribution_points", "out.host": "api.datadoghq.com", "http.status_code": "202", - "http.useragent": "datadogpy/XX (python XX; os linux; arch x86_64)", + "http.useragent": "datadogpy/XX (python XX; os linux; arch XXXX)", "_dd.p.dm": "-0", "_dd.p.tid": "XXXX", "language": "python" @@ -512,7 +512,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate", " ] ] } -HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept-Encoding:gzip, deflate", "Accept:*/*", "Connection:keep-alive", "Content-Encoding:deflate", "Content-Length:XXXX", "Content-Type:application/json", "DD-API-KEY:XXXX", "User-Agent:datadogpy/XX (python XX; os linux; arch x86_64)", "traceparent:XXX", "tracestate:XXX +HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept-Encoding:gzip, deflate", "Accept:*/*", 
"Connection:keep-alive", "Content-Encoding:deflate", "Content-Length:XXXX", "Content-Type:application/json", "DD-API-KEY:XXXX", "User-Agent:datadogpy/XX (python XX; os linux; arch XXXX)", "traceparent:XXX", "tracestate:XXX { "traces": [ [ @@ -535,7 +535,7 @@ HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept "http.url": "https://api.datadoghq.com/api/v1/distribution_points", "out.host": "api.datadoghq.com", "http.status_code": "202", - "http.useragent": "datadogpy/XX (python XX; os linux; arch x86_64)", + "http.useragent": "datadogpy/XX (python XX; os linux; arch XXXX)", "_dd.p.dm": "-0", "_dd.p.tid": "XXXX", "language": "python" @@ -705,7 +705,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate", " ] ] } -HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept-Encoding:gzip, deflate", "Accept:*/*", "Connection:keep-alive", "Content-Encoding:deflate", "Content-Length:XXXX", "Content-Type:application/json", "DD-API-KEY:XXXX", "User-Agent:datadogpy/XX (python XX; os linux; arch x86_64)", "traceparent:XXX", "tracestate:XXX +HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept-Encoding:gzip, deflate", "Accept:*/*", "Connection:keep-alive", "Content-Encoding:deflate", "Content-Length:XXXX", "Content-Type:application/json", "DD-API-KEY:XXXX", "User-Agent:datadogpy/XX (python XX; os linux; arch XXXX)", "traceparent:XXX", "tracestate:XXX { "traces": [ [ @@ -728,7 +728,7 @@ HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept "http.url": "https://api.datadoghq.com/api/v1/distribution_points", "out.host": "api.datadoghq.com", "http.status_code": "202", - "http.useragent": "datadogpy/XX (python XX; os linux; arch x86_64)", + "http.useragent": "datadogpy/XX (python XX; os linux; arch XXXX)", "_dd.p.dm": "-0", "_dd.p.tid": "XXXX", "language": "python" @@ -890,7 +890,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate", " ] ] } -HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept-Encoding:gzip, deflate", "Accept:*/*", "Connection:keep-alive", "Content-Encoding:deflate", "Content-Length:XXXX", "Content-Type:application/json", "DD-API-KEY:XXXX", "User-Agent:datadogpy/XX (python XX; os linux; arch x86_64)", "traceparent:XXX", "tracestate:XXX +HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept-Encoding:gzip, deflate", "Accept:*/*", "Connection:keep-alive", "Content-Encoding:deflate", "Content-Length:XXXX", "Content-Type:application/json", "DD-API-KEY:XXXX", "User-Agent:datadogpy/XX (python XX; os linux; arch XXXX)", "traceparent:XXX", "tracestate:XXX { "traces": [ [ @@ -913,7 +913,7 @@ HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept "http.url": "https://api.datadoghq.com/api/v1/distribution_points", "out.host": "api.datadoghq.com", "http.status_code": "202", - "http.useragent": "datadogpy/XX (python XX; os linux; arch x86_64)", + "http.useragent": "datadogpy/XX (python XX; os linux; arch XXXX)", "_dd.p.dm": "-0", "_dd.p.tid": "XXXX", "language": "python" @@ -1072,7 +1072,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate", " ] ] } -HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept-Encoding:gzip, deflate", "Accept:*/*", "Connection:keep-alive", "Content-Encoding:deflate", "Content-Length:XXXX", "Content-Type:application/json", "DD-API-KEY:XXXX", "User-Agent:datadogpy/XX (python XX; os 
linux; arch x86_64)", "traceparent:XXX", "tracestate:XXX +HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept-Encoding:gzip, deflate", "Accept:*/*", "Connection:keep-alive", "Content-Encoding:deflate", "Content-Length:XXXX", "Content-Type:application/json", "DD-API-KEY:XXXX", "User-Agent:datadogpy/XX (python XX; os linux; arch XXXX)", "traceparent:XXX", "tracestate:XXX { "traces": [ [ @@ -1095,7 +1095,7 @@ HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept "http.url": "https://api.datadoghq.com/api/v1/distribution_points", "out.host": "api.datadoghq.com", "http.status_code": "202", - "http.useragent": "datadogpy/XX (python XX; os linux; arch x86_64)", + "http.useragent": "datadogpy/XX (python XX; os linux; arch XXXX)", "_dd.p.dm": "-0", "_dd.p.tid": "XXXX", "language": "python" @@ -1255,7 +1255,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate", " ] ] } -HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept-Encoding:gzip, deflate", "Accept:*/*", "Connection:keep-alive", "Content-Encoding:deflate", "Content-Length:XXXX", "Content-Type:application/json", "DD-API-KEY:XXXX", "User-Agent:datadogpy/XX (python XX; os linux; arch x86_64)", "traceparent:XXX", "tracestate:XXX +HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept-Encoding:gzip, deflate", "Accept:*/*", "Connection:keep-alive", "Content-Encoding:deflate", "Content-Length:XXXX", "Content-Type:application/json", "DD-API-KEY:XXXX", "User-Agent:datadogpy/XX (python XX; os linux; arch XXXX)", "traceparent:XXX", "tracestate:XXX { "traces": [ [ @@ -1278,7 +1278,7 @@ HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept "http.url": "https://api.datadoghq.com/api/v1/distribution_points", "out.host": "api.datadoghq.com", "http.status_code": "202", - "http.useragent": "datadogpy/XX (python XX; os linux; arch x86_64)", + "http.useragent": "datadogpy/XX (python XX; os linux; arch XXXX)", "_dd.p.dm": "-0", "_dd.p.tid": "XXXX", "language": "python" @@ -1437,7 +1437,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate", " ] ] } -HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept-Encoding:gzip, deflate", "Accept:*/*", "Connection:keep-alive", "Content-Encoding:deflate", "Content-Length:XXXX", "Content-Type:application/json", "DD-API-KEY:XXXX", "User-Agent:datadogpy/XX (python XX; os linux; arch x86_64)", "traceparent:XXX", "tracestate:XXX +HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept-Encoding:gzip, deflate", "Accept:*/*", "Connection:keep-alive", "Content-Encoding:deflate", "Content-Length:XXXX", "Content-Type:application/json", "DD-API-KEY:XXXX", "User-Agent:datadogpy/XX (python XX; os linux; arch XXXX)", "traceparent:XXX", "tracestate:XXX { "traces": [ [ @@ -1460,7 +1460,7 @@ HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept "http.url": "https://api.datadoghq.com/api/v1/distribution_points", "out.host": "api.datadoghq.com", "http.status_code": "202", - "http.useragent": "datadogpy/XX (python XX; os linux; arch x86_64)", + "http.useragent": "datadogpy/XX (python XX; os linux; arch XXXX)", "_dd.p.dm": "-0", "_dd.p.tid": "XXXX", "language": "python" @@ -1627,7 +1627,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate", " ] ] } -HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept-Encoding:gzip, 
deflate", "Accept:*/*", "Connection:keep-alive", "Content-Encoding:deflate", "Content-Length:XXXX", "Content-Type:application/json", "DD-API-KEY:XXXX", "User-Agent:datadogpy/XX (python XX; os linux; arch x86_64)", "traceparent:XXX", "tracestate:XXX +HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept-Encoding:gzip, deflate", "Accept:*/*", "Connection:keep-alive", "Content-Encoding:deflate", "Content-Length:XXXX", "Content-Type:application/json", "DD-API-KEY:XXXX", "User-Agent:datadogpy/XX (python XX; os linux; arch XXXX)", "traceparent:XXX", "tracestate:XXX { "traces": [ [ @@ -1650,7 +1650,7 @@ HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept "http.url": "https://api.datadoghq.com/api/v1/distribution_points", "out.host": "api.datadoghq.com", "http.status_code": "202", - "http.useragent": "datadogpy/XX (python XX; os linux; arch x86_64)", + "http.useragent": "datadogpy/XX (python XX; os linux; arch XXXX)", "_dd.p.dm": "-0", "_dd.p.tid": "XXXX", "language": "python" diff --git a/tests/integration/snapshots/logs/sync-metrics_python39.log b/tests/integration/snapshots/logs/sync-metrics_python39.log index 92a7cd33..98d74ed6 100644 --- a/tests/integration/snapshots/logs/sync-metrics_python39.log +++ b/tests/integration/snapshots/logs/sync-metrics_python39.log @@ -149,7 +149,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate", " ] ] } -HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept-Encoding:gzip, deflate", "Accept:*/*", "Connection:keep-alive", "Content-Encoding:deflate", "Content-Length:XXXX", "Content-Type:application/json", "DD-API-KEY:XXXX", "User-Agent:datadogpy/XX (python XX; os linux; arch x86_64)", "traceparent:XXX", "tracestate:XXX +HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept-Encoding:gzip, deflate", "Accept:*/*", "Connection:keep-alive", "Content-Encoding:deflate", "Content-Length:XXXX", "Content-Type:application/json", "DD-API-KEY:XXXX", "User-Agent:datadogpy/XX (python XX; os linux; arch XXXX)", "traceparent:XXX", "tracestate:XXX { "traces": [ [ @@ -172,7 +172,7 @@ HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept "http.url": "https://api.datadoghq.com/api/v1/distribution_points", "out.host": "api.datadoghq.com", "http.status_code": "202", - "http.useragent": "datadogpy/XX (python XX; os linux; arch x86_64)", + "http.useragent": "datadogpy/XX (python XX; os linux; arch XXXX)", "_dd.p.dm": "-0", "_dd.p.tid": "XXXX", "language": "python" @@ -334,7 +334,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate", " ] ] } -HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept-Encoding:gzip, deflate", "Accept:*/*", "Connection:keep-alive", "Content-Encoding:deflate", "Content-Length:XXXX", "Content-Type:application/json", "DD-API-KEY:XXXX", "User-Agent:datadogpy/XX (python XX; os linux; arch x86_64)", "traceparent:XXX", "tracestate:XXX +HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept-Encoding:gzip, deflate", "Accept:*/*", "Connection:keep-alive", "Content-Encoding:deflate", "Content-Length:XXXX", "Content-Type:application/json", "DD-API-KEY:XXXX", "User-Agent:datadogpy/XX (python XX; os linux; arch XXXX)", "traceparent:XXX", "tracestate:XXX { "traces": [ [ @@ -357,7 +357,7 @@ HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept "http.url": 
"https://api.datadoghq.com/api/v1/distribution_points", "out.host": "api.datadoghq.com", "http.status_code": "202", - "http.useragent": "datadogpy/XX (python XX; os linux; arch x86_64)", + "http.useragent": "datadogpy/XX (python XX; os linux; arch XXXX)", "_dd.p.dm": "-0", "_dd.p.tid": "XXXX", "language": "python" @@ -512,7 +512,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate", " ] ] } -HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept-Encoding:gzip, deflate", "Accept:*/*", "Connection:keep-alive", "Content-Encoding:deflate", "Content-Length:XXXX", "Content-Type:application/json", "DD-API-KEY:XXXX", "User-Agent:datadogpy/XX (python XX; os linux; arch x86_64)", "traceparent:XXX", "tracestate:XXX +HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept-Encoding:gzip, deflate", "Accept:*/*", "Connection:keep-alive", "Content-Encoding:deflate", "Content-Length:XXXX", "Content-Type:application/json", "DD-API-KEY:XXXX", "User-Agent:datadogpy/XX (python XX; os linux; arch XXXX)", "traceparent:XXX", "tracestate:XXX { "traces": [ [ @@ -535,7 +535,7 @@ HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept "http.url": "https://api.datadoghq.com/api/v1/distribution_points", "out.host": "api.datadoghq.com", "http.status_code": "202", - "http.useragent": "datadogpy/XX (python XX; os linux; arch x86_64)", + "http.useragent": "datadogpy/XX (python XX; os linux; arch XXXX)", "_dd.p.dm": "-0", "_dd.p.tid": "XXXX", "language": "python" @@ -705,7 +705,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate", " ] ] } -HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept-Encoding:gzip, deflate", "Accept:*/*", "Connection:keep-alive", "Content-Encoding:deflate", "Content-Length:XXXX", "Content-Type:application/json", "DD-API-KEY:XXXX", "User-Agent:datadogpy/XX (python XX; os linux; arch x86_64)", "traceparent:XXX", "tracestate:XXX +HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept-Encoding:gzip, deflate", "Accept:*/*", "Connection:keep-alive", "Content-Encoding:deflate", "Content-Length:XXXX", "Content-Type:application/json", "DD-API-KEY:XXXX", "User-Agent:datadogpy/XX (python XX; os linux; arch XXXX)", "traceparent:XXX", "tracestate:XXX { "traces": [ [ @@ -728,7 +728,7 @@ HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept "http.url": "https://api.datadoghq.com/api/v1/distribution_points", "out.host": "api.datadoghq.com", "http.status_code": "202", - "http.useragent": "datadogpy/XX (python XX; os linux; arch x86_64)", + "http.useragent": "datadogpy/XX (python XX; os linux; arch XXXX)", "_dd.p.dm": "-0", "_dd.p.tid": "XXXX", "language": "python" @@ -890,7 +890,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate", " ] ] } -HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept-Encoding:gzip, deflate", "Accept:*/*", "Connection:keep-alive", "Content-Encoding:deflate", "Content-Length:XXXX", "Content-Type:application/json", "DD-API-KEY:XXXX", "User-Agent:datadogpy/XX (python XX; os linux; arch x86_64)", "traceparent:XXX", "tracestate:XXX +HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept-Encoding:gzip, deflate", "Accept:*/*", "Connection:keep-alive", "Content-Encoding:deflate", "Content-Length:XXXX", "Content-Type:application/json", "DD-API-KEY:XXXX", "User-Agent:datadogpy/XX (python XX; os linux; arch 
XXXX)", "traceparent:XXX", "tracestate:XXX { "traces": [ [ @@ -913,7 +913,7 @@ HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept "http.url": "https://api.datadoghq.com/api/v1/distribution_points", "out.host": "api.datadoghq.com", "http.status_code": "202", - "http.useragent": "datadogpy/XX (python XX; os linux; arch x86_64)", + "http.useragent": "datadogpy/XX (python XX; os linux; arch XXXX)", "_dd.p.dm": "-0", "_dd.p.tid": "XXXX", "language": "python" @@ -1072,7 +1072,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate", " ] ] } -HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept-Encoding:gzip, deflate", "Accept:*/*", "Connection:keep-alive", "Content-Encoding:deflate", "Content-Length:XXXX", "Content-Type:application/json", "DD-API-KEY:XXXX", "User-Agent:datadogpy/XX (python XX; os linux; arch x86_64)", "traceparent:XXX", "tracestate:XXX +HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept-Encoding:gzip, deflate", "Accept:*/*", "Connection:keep-alive", "Content-Encoding:deflate", "Content-Length:XXXX", "Content-Type:application/json", "DD-API-KEY:XXXX", "User-Agent:datadogpy/XX (python XX; os linux; arch XXXX)", "traceparent:XXX", "tracestate:XXX { "traces": [ [ @@ -1095,7 +1095,7 @@ HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept "http.url": "https://api.datadoghq.com/api/v1/distribution_points", "out.host": "api.datadoghq.com", "http.status_code": "202", - "http.useragent": "datadogpy/XX (python XX; os linux; arch x86_64)", + "http.useragent": "datadogpy/XX (python XX; os linux; arch XXXX)", "_dd.p.dm": "-0", "_dd.p.tid": "XXXX", "language": "python" @@ -1255,7 +1255,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate", " ] ] } -HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept-Encoding:gzip, deflate", "Accept:*/*", "Connection:keep-alive", "Content-Encoding:deflate", "Content-Length:XXXX", "Content-Type:application/json", "DD-API-KEY:XXXX", "User-Agent:datadogpy/XX (python XX; os linux; arch x86_64)", "traceparent:XXX", "tracestate:XXX +HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept-Encoding:gzip, deflate", "Accept:*/*", "Connection:keep-alive", "Content-Encoding:deflate", "Content-Length:XXXX", "Content-Type:application/json", "DD-API-KEY:XXXX", "User-Agent:datadogpy/XX (python XX; os linux; arch XXXX)", "traceparent:XXX", "tracestate:XXX { "traces": [ [ @@ -1278,7 +1278,7 @@ HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept "http.url": "https://api.datadoghq.com/api/v1/distribution_points", "out.host": "api.datadoghq.com", "http.status_code": "202", - "http.useragent": "datadogpy/XX (python XX; os linux; arch x86_64)", + "http.useragent": "datadogpy/XX (python XX; os linux; arch XXXX)", "_dd.p.dm": "-0", "_dd.p.tid": "XXXX", "language": "python" @@ -1437,7 +1437,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate", " ] ] } -HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept-Encoding:gzip, deflate", "Accept:*/*", "Connection:keep-alive", "Content-Encoding:deflate", "Content-Length:XXXX", "Content-Type:application/json", "DD-API-KEY:XXXX", "User-Agent:datadogpy/XX (python XX; os linux; arch x86_64)", "traceparent:XXX", "tracestate:XXX +HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept-Encoding:gzip, deflate", 
"Accept:*/*", "Connection:keep-alive", "Content-Encoding:deflate", "Content-Length:XXXX", "Content-Type:application/json", "DD-API-KEY:XXXX", "User-Agent:datadogpy/XX (python XX; os linux; arch XXXX)", "traceparent:XXX", "tracestate:XXX { "traces": [ [ @@ -1460,7 +1460,7 @@ HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept "http.url": "https://api.datadoghq.com/api/v1/distribution_points", "out.host": "api.datadoghq.com", "http.status_code": "202", - "http.useragent": "datadogpy/XX (python XX; os linux; arch x86_64)", + "http.useragent": "datadogpy/XX (python XX; os linux; arch XXXX)", "_dd.p.dm": "-0", "_dd.p.tid": "XXXX", "language": "python" @@ -1627,7 +1627,7 @@ HTTP GET https://www.datadoghq.com/ Headers: ["Accept-Encoding:gzip, deflate", " ] ] } -HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept-Encoding:gzip, deflate", "Accept:*/*", "Connection:keep-alive", "Content-Encoding:deflate", "Content-Length:XXXX", "Content-Type:application/json", "DD-API-KEY:XXXX", "User-Agent:datadogpy/XX (python XX; os linux; arch x86_64)", "traceparent:XXX", "tracestate:XXX +HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept-Encoding:gzip, deflate", "Accept:*/*", "Connection:keep-alive", "Content-Encoding:deflate", "Content-Length:XXXX", "Content-Type:application/json", "DD-API-KEY:XXXX", "User-Agent:datadogpy/XX (python XX; os linux; arch XXXX)", "traceparent:XXX", "tracestate:XXX { "traces": [ [ @@ -1650,7 +1650,7 @@ HTTP POST https://api.datadoghq.com/api/v1/distribution_points Headers: ["Accept "http.url": "https://api.datadoghq.com/api/v1/distribution_points", "out.host": "api.datadoghq.com", "http.status_code": "202", - "http.useragent": "datadogpy/XX (python XX; os linux; arch x86_64)", + "http.useragent": "datadogpy/XX (python XX; os linux; arch XXXX)", "_dd.p.dm": "-0", "_dd.p.tid": "XXXX", "language": "python" diff --git a/tests/test_tracing.py b/tests/test_tracing.py index 3a28a2a3..3d047822 100644 --- a/tests/test_tracing.py +++ b/tests/test_tracing.py @@ -184,6 +184,8 @@ def test_with_non_object_event(self): {}, ) + """ + TODO(astuyve) I don't think partial extraction is forbidden anymore? 
ask rey @with_trace_propagation_style("datadog") def test_with_incomplete_datadog_trace_headers(self): lambda_ctx = get_mock_context() @@ -192,6 +194,7 @@ def test_with_incomplete_datadog_trace_headers(self): lambda_ctx, ) self.assertEqual(source, "xray") + print(ctx) self.assertEqual( ctx, Context( @@ -208,6 +211,7 @@ def test_with_incomplete_datadog_trace_headers(self): TraceHeader.SAMPLING_PRIORITY: "2", }, ) + """ @with_trace_propagation_style("datadog") def test_with_complete_datadog_trace_headers(self): @@ -261,6 +265,7 @@ def test_with_w3c_trace_headers(self): "traceparent": "00-0000000000000000000000000000007b-0000000000000141-01", "tracestate": "dd=s:2;t.dm:-0,rojo=00f067aa0ba902b7,congo=t61rcWkgMzE", "_dd.p.dm": "-0", + "_dd.parent_id": "0000000000000000", }, ) self.assertEqual(ctx, expected_context) @@ -268,7 +273,7 @@ def test_with_w3c_trace_headers(self): get_dd_trace_context(), { "traceparent": "00-0000000000000000000000000000007b-94ae789b969f1cc5-01", - "tracestate": "dd=s:2;t.dm:-0,rojo=00f067aa0ba902b7,congo=t61rcWkgMzE", + "tracestate": "dd=p:94ae789b969f1cc5;s:2;t.dm:-0,rojo=00f067aa0ba902b7,congo=t61rcWkgMzE", }, ) create_dd_dummy_metadata_subsegment(ctx, XraySubsegment.TRACE_KEY) @@ -453,6 +458,7 @@ def test_with_sqs_distributed_w3c_trace_data(self): "traceparent": "00-0000000000000000000000000000007b-0000000000000141-01", "tracestate": "dd=s:2;t.dm:-0,rojo=00f067aa0ba902b7,congo=t61rcWkgMzE", "_dd.p.dm": "-0", + "_dd.parent_id": "0000000000000000", }, ) self.assertEqual(ctx, expected_context) @@ -460,7 +466,7 @@ def test_with_sqs_distributed_w3c_trace_data(self): get_dd_trace_context(), { "traceparent": "00-0000000000000000000000000000007b-94ae789b969f1cc5-01", - "tracestate": "dd=s:2;t.dm:-0,rojo=00f067aa0ba902b7,congo=t61rcWkgMzE", + "tracestate": "dd=p:94ae789b969f1cc5;s:2;t.dm:-0,rojo=00f067aa0ba902b7,congo=t61rcWkgMzE", }, ) create_dd_dummy_metadata_subsegment(ctx, XraySubsegment.TRACE_KEY) @@ -523,14 +529,16 @@ def test_with_legacy_client_context_w3c_trace_data(self): "traceparent": "00-0000000000000000000000000000029a-0000000000000309-01", "tracestate": "dd=s:1;t.dm:-0,rojo=00f067aa0ba902b7,congo=t61rcWkgMzE", "_dd.p.dm": "-0", + "_dd.parent_id": "0000000000000000", }, ) + print(ctx) self.assertEqual(ctx, expected_context) self.assertDictEqual( get_dd_trace_context(), { "traceparent": "00-0000000000000000000000000000029a-94ae789b969f1cc5-01", - "tracestate": "dd=s:1;t.dm:-0,rojo=00f067aa0ba902b7,congo=t61rcWkgMzE", + "tracestate": "dd=p:94ae789b969f1cc5;s:1;t.dm:-0,rojo=00f067aa0ba902b7,congo=t61rcWkgMzE", }, ) create_dd_dummy_metadata_subsegment(ctx, XraySubsegment.TRACE_KEY) @@ -590,6 +598,7 @@ def test_with_new_client_context_w3c_trace_data(self): "traceparent": "00-0000000000000000000000000000029a-0000000000000309-01", "tracestate": "dd=s:1;t.dm:-0,rojo=00f067aa0ba902b7,congo=t61rcWkgMzE", "_dd.p.dm": "-0", + "_dd.parent_id": "0000000000000000", }, ) self.assertEqual(ctx, expected_context) @@ -597,7 +606,7 @@ def test_with_new_client_context_w3c_trace_data(self): get_dd_trace_context(), { "traceparent": "00-0000000000000000000000000000029a-94ae789b969f1cc5-01", - "tracestate": "dd=s:1;t.dm:-0,rojo=00f067aa0ba902b7,congo=t61rcWkgMzE", + "tracestate": "dd=p:94ae789b969f1cc5;s:1;t.dm:-0,rojo=00f067aa0ba902b7,congo=t61rcWkgMzE", }, ) create_dd_dummy_metadata_subsegment(ctx, XraySubsegment.TRACE_KEY) @@ -2231,7 +2240,7 @@ def test_mark_trace_as_error_for_5xx_responses_getting_400_response_code( def 
test_mark_trace_as_error_for_5xx_responses_sends_error_metric_and_set_error_tags( self, mock_submit_errors_metric ): - mock_span = Mock(ddtrace.span.Span) + mock_span = Mock(ddtrace.Span) status_code = "500" mark_trace_as_error_for_5xx_responses( context="fake_context", status_code=status_code, span=mock_span diff --git a/tests/test_wrapper.py b/tests/test_wrapper.py index 39998921..ee1ef99c 100644 --- a/tests/test_wrapper.py +++ b/tests/test_wrapper.py @@ -155,6 +155,7 @@ def lambda_handler(event, context): lambda_metric("test.metric", 100) time.sleep(11) # assert flushing in the thread + # TODO(astuyve) flaky test here, sometimes this is zero self.assertEqual(self.mock_threadstats_flush_distributions.call_count, 1) lambda_metric("test.metric", 200) @@ -502,6 +503,9 @@ def lambda_handler(event, context): self.mock_submit_invocations_metric.assert_called_once() def test_dd_requests_service_name_default(self): + # TODO(astuyve) this is now set by CI, so we need to null it out for this case + os.environ["DD_SERVICE"] = "aws.lambda" + @wrapper.datadog_lambda_wrapper def lambda_handler(event, context): pass From 90dcd4969a9f75ec4907f110d742c98ac7c83b7b Mon Sep 17 00:00:00 2001 From: AJ Stuyvenberg Date: Mon, 4 Mar 2024 16:47:20 -0500 Subject: [PATCH 04/18] feat: Disable instrumentation telemetry by default (#423) --- datadog_lambda/tracing.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/datadog_lambda/tracing.py b/datadog_lambda/tracing.py index dc7e32b2..948a19e5 100644 --- a/datadog_lambda/tracing.py +++ b/datadog_lambda/tracing.py @@ -28,6 +28,10 @@ send_segment, parse_xray_header, ) + +if os.environ.get("DD_INSTRUMENTATION_TELEMETRY_ENABLED") is None: + os.environ["DD_INSTRUMENTATION_TELEMETRY_ENABLED"] = "false" + from ddtrace import tracer, patch, Span from ddtrace import __version__ as ddtrace_version from ddtrace.propagation.http import HTTPPropagator From 57b280e11d0aeb1e50cf5a0f04bba0a21078bb51 Mon Sep 17 00:00:00 2001 From: AJ Stuyvenberg Date: Mon, 4 Mar 2024 19:31:18 -0500 Subject: [PATCH 05/18] fix: wrong py version (#426) --- ci/datasources/runtimes.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ci/datasources/runtimes.yaml b/ci/datasources/runtimes.yaml index 0e084b08..62fd0279 100644 --- a/ci/datasources/runtimes.yaml +++ b/ci/datasources/runtimes.yaml @@ -8,7 +8,7 @@ runtimes: arch: "arm64" image: "3.8" - name: "python39" - python_version: "3.8" + python_version: "3.9" arch: "amd64" image: "3.9" - name: "python39" From 8b44ee3b246ddd99148d9af572beb691c77007c8 Mon Sep 17 00:00:00 2001 From: AJ Stuyvenberg Date: Mon, 4 Mar 2024 19:32:18 -0500 Subject: [PATCH 06/18] fix: Wrong description (#425) --- ci/publish_layers.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ci/publish_layers.sh b/ci/publish_layers.sh index 31a91839..f94a0bc3 100755 --- a/ci/publish_layers.sh +++ b/ci/publish_layers.sh @@ -60,7 +60,7 @@ publish_layer() { layer_path=$4 version_nbr=$(aws lambda publish-layer-version --layer-name $layer_name \ - --description "Datadog Lambda Layer for Node" \ + --description "Datadog Lambda Layer for Python" \ --zip-file "fileb://$layer_path" \ --region $region \ --compatible-runtimes $compatible_runtimes \ From a06668d72cf968435e3ca95605f91207034d080f Mon Sep 17 00:00:00 2001 From: AJ Stuyvenberg Date: Tue, 5 Mar 2024 09:19:49 -0500 Subject: [PATCH 07/18] feat: Move env setting to init.py. 
Disable api security (#424) --- datadog_lambda/__init__.py | 8 ++++++++ datadog_lambda/tracing.py | 3 --- 2 files changed, 8 insertions(+), 3 deletions(-) diff --git a/datadog_lambda/__init__.py b/datadog_lambda/__init__.py index cfb5f8e3..5cc2ba00 100644 --- a/datadog_lambda/__init__.py +++ b/datadog_lambda/__init__.py @@ -1,5 +1,13 @@ from datadog_lambda.cold_start import initialize_cold_start_tracing from datadog_lambda.logger import initialize_logging +import os + + +if os.environ.get("DD_INSTRUMENTATION_TELEMETRY_ENABLED") is None: + os.environ["DD_INSTRUMENTATION_TELEMETRY_ENABLED"] = "false" + +if os.environ.get("DD_API_SECURITY_ENABLED") is None: + os.environ["DD_API_SECURITY_ENABLED"] = "False" initialize_cold_start_tracing() diff --git a/datadog_lambda/tracing.py b/datadog_lambda/tracing.py index 948a19e5..f032059b 100644 --- a/datadog_lambda/tracing.py +++ b/datadog_lambda/tracing.py @@ -29,9 +29,6 @@ parse_xray_header, ) -if os.environ.get("DD_INSTRUMENTATION_TELEMETRY_ENABLED") is None: - os.environ["DD_INSTRUMENTATION_TELEMETRY_ENABLED"] = "false" - from ddtrace import tracer, patch, Span from ddtrace import __version__ as ddtrace_version from ddtrace.propagation.http import HTTPPropagator From 0cf9c56d09fb2c050ddbc48c2441fefcf8362c38 Mon Sep 17 00:00:00 2001 From: AJ Stuyvenberg Date: Tue, 5 Mar 2024 09:20:28 -0500 Subject: [PATCH 08/18] feat: Remove http check for extension hello route (#422) * feat: Remove http check for extension hello route * feat: lint --- datadog_lambda/extension.py | 27 +++++++-------------------- tests/test_extension.py | 13 +++---------- 2 files changed, 10 insertions(+), 30 deletions(-) diff --git a/datadog_lambda/extension.py b/datadog_lambda/extension.py index d66848ff..159048d7 100644 --- a/datadog_lambda/extension.py +++ b/datadog_lambda/extension.py @@ -1,36 +1,23 @@ import logging from os import path -try: - # only available in python 3 - # not an issue since the extension is not compatible with python 2.x runtime - # https://docs.aws.amazon.com/lambda/latest/dg/using-extensions.html - import urllib.request -except ImportError: - # safe since both calls to urllib are protected with try/expect and will return false - urllib = None - AGENT_URL = "http://127.0.0.1:8124" -HELLO_PATH = "/lambda/hello" FLUSH_PATH = "/lambda/flush" EXTENSION_PATH = "/opt/extensions/datadog-agent" logger = logging.getLogger(__name__) -def is_extension_running(): - if not path.exists(EXTENSION_PATH): - return False - try: - urllib.request.urlopen(AGENT_URL + HELLO_PATH) - except Exception as e: - logger.debug("Extension is not running, returned with error %s", e) - return False - return True +def is_extension_present(): + if path.exists(EXTENSION_PATH): + return True + return False def flush_extension(): try: + import urllib.request + req = urllib.request.Request(AGENT_URL + FLUSH_PATH, "".encode("ascii")) urllib.request.urlopen(req) except Exception as e: @@ -39,4 +26,4 @@ def flush_extension(): return True -should_use_extension = is_extension_running() +should_use_extension = is_extension_present() diff --git a/tests/test_extension.py b/tests/test_extension.py index 5ecb0e36..92142a9e 100644 --- a/tests/test_extension.py +++ b/tests/test_extension.py @@ -6,7 +6,7 @@ from unittest.mock import patch from datadog_lambda.extension import ( - is_extension_running, + is_extension_present, flush_extension, should_use_extension, ) @@ -48,19 +48,12 @@ def tearDown(self): @patch("datadog_lambda.extension.EXTENSION_PATH", os.path.abspath(__file__)) def 
test_is_extension_running_true(self): - assert is_extension_running() - assert self.server.called + assert is_extension_present() def test_is_extension_running_file_not_found(self): - assert not is_extension_running() + assert not is_extension_present() assert not self.server.called - @patch("datadog_lambda.extension.EXTENSION_PATH", os.path.abspath(__file__)) - def test_is_extension_running_http_failure(self): - self.server.raises = True - assert not is_extension_running() - assert self.server.called - @patch("datadog_lambda.extension.EXTENSION_PATH", os.path.abspath(__file__)) def test_flush_ok(self): assert flush_extension() From d481a72cd60e260af2797f30baf91eda5097a856 Mon Sep 17 00:00:00 2001 From: AJ Stuyvenberg Date: Tue, 5 Mar 2024 16:01:47 -0500 Subject: [PATCH 09/18] Remove iast and ddwaf (#427) * feat: Rip out appsec * feat: Remove more appsec stuff * empty commit to trigger ci --- Dockerfile | 3 +++ 1 file changed, 3 insertions(+) diff --git a/Dockerfile b/Dockerfile index a2da16f9..757d0c00 100644 --- a/Dockerfile +++ b/Dockerfile @@ -23,6 +23,9 @@ RUN find . -name '*.so' -exec strip -g {} \; RUN rm -rf ./python/lib/$runtime/site-packages/botocore* RUN rm -rf ./python/lib/$runtime/site-packages/setuptools RUN rm -rf ./python/lib/$runtime/site-packages/jsonschema/tests +RUN find . -name 'libddwaf.so' -delete +RUN rm ./python/lib/$runtime/site-packages/ddtrace/appsec/_iast/_taint_tracking/*.so +RUN rm ./python/lib/$runtime/site-packages/ddtrace/appsec/_iast/_stacktrace*.so FROM scratch COPY --from=builder /build/python / From 2b8d31d2129cd32ce6377e0f12486e2563554eef Mon Sep 17 00:00:00 2001 From: AJ Stuyvenberg Date: Tue, 5 Mar 2024 16:14:18 -0500 Subject: [PATCH 10/18] feat: ddtrace py now sets a default sample rate header, so we don't fall into this case anymore (#428) --- tests/test_tracing.py | 7 +------ 1 file changed, 1 insertion(+), 6 deletions(-) diff --git a/tests/test_tracing.py b/tests/test_tracing.py index 3d047822..d14ad1e9 100644 --- a/tests/test_tracing.py +++ b/tests/test_tracing.py @@ -184,17 +184,14 @@ def test_with_non_object_event(self): {}, ) - """ - TODO(astuyve) I don't think partial extraction is forbidden anymore? 
ask rey @with_trace_propagation_style("datadog") def test_with_incomplete_datadog_trace_headers(self): lambda_ctx = get_mock_context() ctx, source, event_source = extract_dd_trace_context( - {"headers": {TraceHeader.TRACE_ID: "123", TraceHeader.PARENT_ID: "321"}}, + {"headers": {TraceHeader.TRACE_ID: "123"}}, lambda_ctx, ) self.assertEqual(source, "xray") - print(ctx) self.assertEqual( ctx, Context( @@ -211,7 +208,6 @@ def test_with_incomplete_datadog_trace_headers(self): TraceHeader.SAMPLING_PRIORITY: "2", }, ) - """ @with_trace_propagation_style("datadog") def test_with_complete_datadog_trace_headers(self): @@ -532,7 +528,6 @@ def test_with_legacy_client_context_w3c_trace_data(self): "_dd.parent_id": "0000000000000000", }, ) - print(ctx) self.assertEqual(ctx, expected_context) self.assertDictEqual( get_dd_trace_context(), From b2e20847d83ccad77db9398cd9829d82452f36fb Mon Sep 17 00:00:00 2001 From: AJ Stuyvenberg Date: Wed, 6 Mar 2024 10:26:18 -0500 Subject: [PATCH 11/18] feat: remove check-size from github workflow (#429) --- .github/workflows/check-size.yml | 33 -------------------------------- 1 file changed, 33 deletions(-) delete mode 100644 .github/workflows/check-size.yml diff --git a/.github/workflows/check-size.yml b/.github/workflows/check-size.yml deleted file mode 100644 index 9a60bfe9..00000000 --- a/.github/workflows/check-size.yml +++ /dev/null @@ -1,33 +0,0 @@ -name: check-size - -on: pull_request - -jobs: - check-size: - runs-on: ubuntu-latest - steps: - - name: Checkout - uses: actions/checkout@v3 - - - name: Set up Python - uses: actions/setup-python@v4 - with: - python-version: 3.12 - - - name: Install Crossbuild Deps - run: | - sudo apt-get update --allow-releaseinfo-change --fix-missing - sudo apt install -y qemu-user-static binfmt-support - - - name: Install dependencies - run: | - pip install virtualenv - virtualenv venv - source venv/bin/activate - pip install .[dev] - - - name: Build Layers - run: ./scripts/build_layers.sh - - - name: Check Size - run: ./scripts/check_layer_size.sh From 1f4593d5bf3f07b9632888982dac68ba5f61c26d Mon Sep 17 00:00:00 2001 From: AJ Stuyvenberg Date: Wed, 6 Mar 2024 10:35:45 -0500 Subject: [PATCH 12/18] v5.89.0 (#430) --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 5dbd9e5b..007f47c6 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "datadog_lambda" -version = "5.88.0" +version = "5.89.0" description = "The Datadog AWS Lambda Library" authors = ["Datadog, Inc. "] license = "Apache-2.0" From 51fefdf4d0650fcc609763f8abb8a3c77038371f Mon Sep 17 00:00:00 2001 From: AJ Stuyvenberg Date: Wed, 6 Mar 2024 11:01:22 -0500 Subject: [PATCH 13/18] Revert "v5.89.0 (#430)" (#431) This reverts commit 1f4593d5bf3f07b9632888982dac68ba5f61c26d. --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 007f47c6..5dbd9e5b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "datadog_lambda" -version = "5.89.0" +version = "5.88.0" description = "The Datadog AWS Lambda Library" authors = ["Datadog, Inc. 
"] license = "Apache-2.0" From a132b042fff2a471330a97ef8cbcb741651ac1f2 Mon Sep 17 00:00:00 2001 From: AJ Stuyvenberg Date: Wed, 6 Mar 2024 11:07:27 -0500 Subject: [PATCH 14/18] feat: Signing job should run as one-shot or loop over layers (#432) --- scripts/sign_layers.sh | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/scripts/sign_layers.sh b/scripts/sign_layers.sh index 7e748f70..700bd55a 100755 --- a/scripts/sign_layers.sh +++ b/scripts/sign_layers.sh @@ -41,6 +41,18 @@ if [ "$1" = "prod" ]; then S3_BUCKET_NAME="dd-lambda-signing-bucket" fi +if [ -z "$LAYER_FILE" ]; then + echo "Layer file not specified, running for all layer files." +else + echo "Layer file is specified: $LAYER_FILE" + if (printf '%s\n' "${LAYER_FILES[@]}" | grep -xq $LAYER_FILE); then + LAYER_FILES=($LAYER_FILE) + else + echo "Unsupported layer found, valid options are : ${LAYER_FILES[@]}" + exit 1 + fi +fi + for LAYER_FILE in "${LAYER_FILES[@]}" do echo From 9b30329795d287157815e3bb908341b99fffdc9e Mon Sep 17 00:00:00 2001 From: AJ Stuyvenberg Date: Wed, 6 Mar 2024 12:32:43 -0500 Subject: [PATCH 15/18] v5.89.0 (#433) --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 5dbd9e5b..007f47c6 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "datadog_lambda" -version = "5.88.0" +version = "5.89.0" description = "The Datadog AWS Lambda Library" authors = ["Datadog, Inc. "] license = "Apache-2.0" From df9e164d025d85138f06e374ecd1e764071c9272 Mon Sep 17 00:00:00 2001 From: AJ Stuyvenberg Date: Wed, 6 Mar 2024 12:53:58 -0500 Subject: [PATCH 16/18] feat: Fix py layer name (#434) --- ci/input_files/build.yaml.tpl | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/ci/input_files/build.yaml.tpl b/ci/input_files/build.yaml.tpl index e562092d..68efeea8 100644 --- a/ci/input_files/build.yaml.tpl +++ b/ci/input_files/build.yaml.tpl @@ -115,13 +115,13 @@ sign-layer ({{ $runtime.name }}-{{ $runtime.arch }}): artifacts: # Re specify artifacts so the modified signed file is passed expire_in: 1 day # Signed layers should expire after 1 day paths: - - .layers/datadog_lambda_python-{{ $runtime.arch }}-{{ $runtime.python_version }}.zip + - .layers/datadog_lambda_py-{{ $runtime.arch }}-{{ $runtime.python_version }}.zip before_script: - apt-get update - apt-get install -y uuid-runtime - EXTERNAL_ID_NAME={{ $environment.external_id }} ROLE_TO_ASSUME={{ $environment.role_to_assume }} AWS_ACCOUNT={{ $environment.account }} source ./ci/get_secrets.sh script: - - LAYER_FILE=datadog_lambda_python-{{ $runtime.arch}}-{{ $runtime.python_version }}.zip ./scripts/sign_layers.sh {{ $environment.name }} + - LAYER_FILE=datadog_lambda_py-{{ $runtime.arch}}-{{ $runtime.python_version }}.zip ./scripts/sign_layers.sh {{ $environment.name }} {{ end }} publish-layer-{{ $environment.name }} ({{ $runtime.name }}-{{ $runtime.arch }}): From 36cb0527e3d0c899eefa6c67a1128eb8531e6a01 Mon Sep 17 00:00:00 2001 From: AJ Stuyvenberg Date: Wed, 6 Mar 2024 12:55:57 -0500 Subject: [PATCH 17/18] Revert "v5.89.0 (#433)" (#435) This reverts commit 9b30329795d287157815e3bb908341b99fffdc9e. --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 007f47c6..5dbd9e5b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "datadog_lambda" -version = "5.89.0" +version = "5.88.0" description = "The Datadog AWS Lambda Library" authors = ["Datadog, Inc. 
"] license = "Apache-2.0" From 537011171b22be8cdb8cc752a112c327b1059392 Mon Sep 17 00:00:00 2001 From: AJ Stuyvenberg Date: Wed, 6 Mar 2024 13:34:19 -0500 Subject: [PATCH 18/18] release v5.89.0 (#437) --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 5dbd9e5b..007f47c6 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "datadog_lambda" -version = "5.88.0" +version = "5.89.0" description = "The Datadog AWS Lambda Library" authors = ["Datadog, Inc. "] license = "Apache-2.0"