diff --git a/Dockerfile b/Dockerfile
index 757d0c00..396ef25c 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -14,18 +14,18 @@ RUN pip install . -t ./python/lib/$runtime/site-packages
 # Remove *.pyc files
 RUN find ./python/lib/$runtime/site-packages -name \*.pyc -delete
 
-# Strip symbols from ddtrace's binaries.
-# TODO (AJ): remove when ddtrace fixes this upstream
-RUN find . -name '*.so' -exec strip -g {} \;
-
 # Remove botocore (40MB) to reduce package size. aws-xray-sdk
 # installs it, while it's already provided by the Lambda Runtime.
 RUN rm -rf ./python/lib/$runtime/site-packages/botocore*
 RUN rm -rf ./python/lib/$runtime/site-packages/setuptools
 RUN rm -rf ./python/lib/$runtime/site-packages/jsonschema/tests
 RUN find . -name 'libddwaf.so' -delete
+RUN rm -rf ./python/lib/$runtime/site-packages/urllib3
 RUN rm ./python/lib/$runtime/site-packages/ddtrace/appsec/_iast/_taint_tracking/*.so
 RUN rm ./python/lib/$runtime/site-packages/ddtrace/appsec/_iast/_stacktrace*.so
+RUN rm ./python/lib/$runtime/site-packages/ddtrace/internal/datadog/profiling/libdd_wrapper.so
+RUN rm ./python/lib/$runtime/site-packages/ddtrace/internal/datadog/profiling/ddup/_ddup.*.so
+RUN rm ./python/lib/$runtime/site-packages/ddtrace/internal/datadog/profiling/stack_v2/_stack_v2.*.so
 
 FROM scratch
 COPY --from=builder /build/python /
diff --git a/ci/input_files/build.yaml.tpl b/ci/input_files/build.yaml.tpl
index 68efeea8..449f6978 100644
--- a/ci/input_files/build.yaml.tpl
+++ b/ci/input_files/build.yaml.tpl
@@ -9,6 +9,7 @@ stages:
   - virtualenv venv
   - source venv/bin/activate
   - pip install .[dev]
+  - pip install poetry
 
 # This is for serverless framework
 .install-node: &install-node
@@ -53,7 +54,7 @@ check-layer-size ({{ $runtime.name }}-{{ $runtime.arch }}):
   dependencies:
     - build-layer ({{ $runtime.name }}-{{ $runtime.arch }})
   script:
-    - PYTHON_VERSION={{ $runtime.python_version }} ./scripts/check_layer_size.sh
+    - PYTHON_VERSION={{ $runtime.python_version }} ARCH={{ $runtime.arch }} ./scripts/check_layer_size.sh
 
 lint python:
   stage: test
@@ -167,6 +168,7 @@ publish-pypi-package:
   stage: publish
   tags: ["arch:amd64"]
   image: registry.ddbuild.io/images/docker:20.10-py3
+  before_script: *python-before-script
  cache: []
  rules:
    - if: '$CI_COMMIT_TAG =~ /^v.*/'
diff --git a/ci/publish_pypi.sh b/ci/publish_pypi.sh
index d7ec78fd..c01df7ef 100755
--- a/ci/publish_pypi.sh
+++ b/ci/publish_pypi.sh
@@ -13,6 +13,14 @@ PYPI_TOKEN=$(aws ssm get-parameter \
     --out text)
 
 # Builds the lambda layer and upload to Pypi
+if [ -z "$CI_COMMIT_TAG" ]; then
+    printf "[Error] No CI_COMMIT_TAG found.\n"
+    printf "Exiting script...\n"
+    exit 1
+else
+    printf "Tag found in environment: $CI_COMMIT_TAG\n"
+fi
+
 # Clear previously built distributions
 if [ -d "dist" ]; then
     echo "Removing folder 'dist' to clear previously built distributions"
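The guard added to ci/publish_pypi.sh only checks that CI_COMMIT_TAG is non-empty before uploading. A stricter pre-publish check could also cross-reference the tag against the version bumped in pyproject.toml below; a minimal sketch of that idea (a hypothetical helper, not part of this change):

```python
# Hypothetical pre-publish guard, not part of this diff: verify that the
# release tag (e.g. "v5.91.0") matches the version declared in pyproject.toml.
import os
import re
import sys


def check_tag_matches_version(pyproject_path="pyproject.toml"):
    tag = os.environ.get("CI_COMMIT_TAG", "")
    if not tag:
        sys.exit("[Error] No CI_COMMIT_TAG found.")
    with open(pyproject_path) as f:
        match = re.search(r'^version\s*=\s*"([^"]+)"', f.read(), re.MULTILINE)
    if match is None or tag.lstrip("v") != match.group(1):
        sys.exit(f"[Error] Tag {tag} does not match the pyproject.toml version.")
    print(f"Tag found in environment: {tag}")


if __name__ == "__main__":
    check_tag_matches_version()
```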
diff --git a/datadog_lambda/cold_start.py b/datadog_lambda/cold_start.py
index 9da02e78..9dcbec23 100644
--- a/datadog_lambda/cold_start.py
+++ b/datadog_lambda/cold_start.py
@@ -8,6 +8,7 @@
 _cold_start = True
 _proactive_initialization = False
 _lambda_container_initialized = False
+_tracer = None
 
 
 def set_cold_start(init_timestamp_ns):
@@ -18,6 +19,7 @@ def set_cold_start(init_timestamp_ns):
     global _cold_start
     global _lambda_container_initialized
     global _proactive_initialization
+    global _tracer
     if not _lambda_container_initialized:
         now = time.time_ns()
         if (now - init_timestamp_ns) // 1_000_000_000 > 10:
@@ -29,6 +31,7 @@
             _cold_start = False
             _proactive_initialization = False
     _lambda_container_initialized = True
+    from ddtrace import tracer as _tracer
 
 
 def is_cold_start():
@@ -62,6 +65,9 @@ def __init__(self, module_name, full_file_path, start_time_ns, end_time_ns=None)
         self.start_time_ns = start_time_ns
         self.end_time_ns = end_time_ns
         self.children = []
+        self.context = None
+        if _lambda_container_initialized:
+            self.context = _tracer.context_provider.active()
 
 
 root_nodes: List[ImportNode] = []
@@ -70,10 +76,8 @@
 
 
 def reset_node_stacks():
-    global root_nodes
-    root_nodes = []
-    global import_stack
-    import_stack = []
+    root_nodes.clear()
+    import_stack.clear()
 
 
 def push_node(module_name, file_path):
@@ -183,7 +187,8 @@ def trace(self, root_nodes: List[ImportNode] = root_nodes):
         cold_start_span = self.create_cold_start_span(cold_start_span_start_time_ns)
         while root_nodes:
             root_node = root_nodes.pop()
-            self.trace_tree(root_node, cold_start_span)
+            parent = root_node.context or cold_start_span
+            self.trace_tree(root_node, parent)
         self.finish_span(cold_start_span, cold_start_span_end_time_ns)
 
     def trace_tree(self, import_node: ImportNode, parent_span):
diff --git a/pyproject.toml b/pyproject.toml
index 94b73d03..806485b6 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "datadog_lambda"
-version = "5.90.0"
+version = "5.91.0"
 description = "The Datadog AWS Lambda Library"
 authors = ["Datadog, Inc. <dev@datadoghq.com>"]
 license = "Apache-2.0"
diff --git a/scripts/check_layer_size.sh b/scripts/check_layer_size.sh
index f1c8c813..84752fa1 100755
--- a/scripts/check_layer_size.sh
+++ b/scripts/check_layer_size.sh
@@ -8,8 +8,9 @@
 
-# Compares layer size to threshold, and fails if below that threshold
+# Compares layer size to threshold, and fails if above that threshold
 
-# 7 mb size limit
-MAX_LAYER_COMPRESSED_SIZE_KB=$(expr 7 \* 1024)
-MAX_LAYER_UNCOMPRESSED_SIZE_KB=$(expr 24 \* 1024)
+# 4 mb size limit
+set -e
+MAX_LAYER_COMPRESSED_SIZE_KB=$(expr 4 \* 1024)
+MAX_LAYER_UNCOMPRESSED_SIZE_KB=$(expr 13 \* 1024)
 
 LAYER_FILES_PREFIX="datadog_lambda_py"
diff --git a/tests/test_cold_start.py b/tests/test_cold_start.py
index 65193e1d..2ce37e7c 100644
--- a/tests/test_cold_start.py
+++ b/tests/test_cold_start.py
@@ -1,10 +1,13 @@
+import os
 import time
 import unittest
-import datadog_lambda.cold_start as cold_start
+
 from sys import modules, meta_path
-import os
 from unittest.mock import MagicMock
 
+import datadog_lambda.cold_start as cold_start
+import datadog_lambda.wrapper as wrapper
+
 
 class TestColdStartTracingSetup(unittest.TestCase):
     def test_proactive_init(self):
@@ -234,3 +237,42 @@ def test_trace_ignore_libs(self):
         self.cold_start_tracer.trace(nodes)
         self.mock_activate.assert_called_once_with(self.mock_trace_ctx)
         self.assertEqual(self.output_spans, ["node_0", "unittest_cold_start"])
+
+
+def test_lazy_loaded_package_imports(monkeypatch):
+
+    spans = []
+
+    def finish(span):
+        spans.append(span)
+
+    monkeypatch.setattr(wrapper.tracer, "_on_span_finish", finish)
+    monkeypatch.setattr(wrapper, "is_new_sandbox", lambda: True)
+    monkeypatch.setattr("datadog_lambda.wrapper.dd_tracing_enabled", True)
+    monkeypatch.setenv(
+        "DD_COLD_START_TRACE_SKIP_LIB", "ddtrace.contrib.logging,datadog_lambda.wrapper"
+    )
+    monkeypatch.setenv("DD_MIN_COLD_START_DURATION", "0")
+
+    @wrapper.datadog_lambda_wrapper
+    def handler(event, context):
+        import tabnanny
+
+    lambda_context = MagicMock()
+    lambda_context.invoked_function_arn = (
+        "arn:aws:lambda:us-west-1:123457598159:function:python-layer-test:1"
+    )
+
+    handler.cold_start_tracing = True
+    handler({}, lambda_context)
+
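The datadog_lambda/cold_start.py changes above give each ImportNode a context attribute: once the sandbox is initialized, the node snapshots the trace context that is active at the moment the module is imported, and ColdStartTracer.trace() then prefers that captured context over the synthetic cold-start span. Modules imported lazily during an invocation therefore become children of that invocation's aws.lambda span. A minimal sketch of the same pattern against ddtrace's public tracer (the helper names on_import and emit_import_spans are illustrative, not the library's API):

```python
# Sketch of the re-parenting pattern used above, assuming ddtrace is installed.
from ddtrace import tracer

captured = []


def on_import(module_name):
    # Snapshot whatever trace context is active at import time; during a
    # Lambda invocation this is the function span's context.
    captured.append((module_name, tracer.context_provider.active()))


def emit_import_spans(cold_start_span):
    for module_name, ctx in captured:
        # Prefer the captured context; fall back to the cold-start span for
        # imports that happened before any trace was active.
        parent = ctx or cold_start_span
        span = tracer.start_span("import", child_of=parent, activate=False)
        span.resource = module_name
        span.finish()
```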
+    function_span = import_span = None
+    for span in spans:
+        if span.resource == "tabnanny":
+            import_span = span
+        elif span.name == "aws.lambda":
+            function_span = span
+
+    assert function_span is not None
+    assert import_span is not None
+    assert import_span.parent_id == function_span.span_id
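The new test captures spans by swapping the tracer's span-finish hook for a list append, then asserts that the tabnanny import span is parented by the aws.lambda function span rather than the cold-start span. A standalone sketch of that capture trick (it relies on _on_span_finish, a private ddtrace attribute, so treat it as version-specific test glue):

```python
# Test-only span capture, assuming the ddtrace internals used above: spans
# finished after the patch land in a local list instead of being flushed.
from ddtrace import tracer

spans = []
tracer._on_span_finish = spans.append

with tracer.trace("aws.lambda"):
    with tracer.trace("import", resource="tabnanny"):
        pass

import_span, function_span = spans  # the inner span finishes first
assert import_span.parent_id == function_span.span_id
```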