Consolidate env reading to single config object. (#600) · DataDog/datadog-lambda-python@b3cf1c2

Commit b3cf1c2

Consolidate env reading to single config object. (#600)

1 parent 6beb65d · commit b3cf1c2

20 files changed · +534 -292 lines changed
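
The commit replaces per-module `os.environ` parsing with attributes on a single, lazily resolved `config` object defined in the new `datadog_lambda/config.py`. A minimal sketch of the before/after pattern, using the `DD_FLUSH_TO_LOG` flag that `api.py` and `metric.py` used to parse themselves:

```python
import os

# Before: each module parsed and cached its own copy of the environment.
flush_to_log = os.environ.get("DD_FLUSH_TO_LOG", "").lower() == "true"

# After: every module imports the shared config object and reads a typed,
# cached attribute instead.
from datadog_lambda.config import config

if config.flush_to_log:
    pass  # route metrics through the log forwarder
```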

datadog_lambda/api.py

Lines changed: 7 additions & 8 deletions
```diff
@@ -1,7 +1,7 @@
 import logging
 import os
 
-from datadog_lambda.fips import fips_mode_enabled
+from datadog_lambda.config import config
 
 logger = logging.getLogger(__name__)
 KMS_ENCRYPTION_CONTEXT_KEY = "LambdaFunctionName"
@@ -29,7 +29,6 @@ def decrypt_kms_api_key(kms_client, ciphertext):
     is added. We need to try decrypting the API key both with and without the encryption context.
     """
     # Try without encryption context, in case API key was encrypted using the AWS CLI
-    function_name = os.environ.get("AWS_LAMBDA_FUNCTION_NAME")
     try:
         plaintext = kms_client.decrypt(CiphertextBlob=decoded_bytes)[
             "Plaintext"
@@ -43,7 +42,7 @@ def decrypt_kms_api_key(kms_client, ciphertext):
         plaintext = kms_client.decrypt(
             CiphertextBlob=decoded_bytes,
             EncryptionContext={
-                KMS_ENCRYPTION_CONTEXT_KEY: function_name,
+                KMS_ENCRYPTION_CONTEXT_KEY: config.function_name,
             },
         )["Plaintext"].decode("utf-8")
 
@@ -66,7 +65,7 @@ def get_api_key() -> str:
     DD_API_KEY = os.environ.get("DD_API_KEY", os.environ.get("DATADOG_API_KEY", ""))
 
     LAMBDA_REGION = os.environ.get("AWS_REGION", "")
-    if fips_mode_enabled:
+    if config.fips_mode_enabled:
         logger.debug(
             "FIPS mode is enabled, using FIPS endpoints for secrets management."
         )
@@ -82,7 +81,7 @@ def get_api_key() -> str:
             return ""
         endpoint_url = (
             f"https://secretsmanager-fips.{secrets_region}.amazonaws.com"
-            if fips_mode_enabled
+            if config.fips_mode_enabled
             else None
         )
         secrets_manager_client = _boto3_client(
@@ -95,7 +94,7 @@ def get_api_key() -> str:
        # SSM endpoints: https://docs.aws.amazon.com/general/latest/gr/ssm.html
        fips_endpoint = (
            f"https://ssm-fips.{LAMBDA_REGION}.amazonaws.com"
-            if fips_mode_enabled
+            if config.fips_mode_enabled
            else None
        )
        ssm_client = _boto3_client("ssm", endpoint_url=fips_endpoint)
@@ -106,7 +105,7 @@ def get_api_key() -> str:
        # KMS endpoints: https://docs.aws.amazon.com/general/latest/gr/kms.html
        fips_endpoint = (
            f"https://kms-fips.{LAMBDA_REGION}.amazonaws.com"
-            if fips_mode_enabled
+            if config.fips_mode_enabled
            else None
        )
        kms_client = _boto3_client("kms", endpoint_url=fips_endpoint)
@@ -118,7 +117,7 @@ def get_api_key() -> str:
 
 
 def init_api():
-    if not os.environ.get("DD_FLUSH_TO_LOG", "").lower() == "true":
+    if not config.flush_to_log:
        # Make sure that this package would always be lazy-loaded/outside from the critical path
        # since underlying packages are quite heavy to load
        # and useless with the extension unless sending metrics with timestamps
```
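
The FIPS endpoint selection in `get_api_key()` now reads `config.fips_mode_enabled` instead of the deleted module-level flag. A small sketch of the pattern the hunks above apply three times; `_fips_endpoint` is an illustrative helper, not part of the module:

```python
from datadog_lambda.config import config

def _fips_endpoint(service, region):
    # Illustrative helper mirroring the inline expressions in get_api_key().
    # With FIPS mode off, the endpoint stays None and boto3 uses its default.
    if config.fips_mode_enabled:
        return f"https://{service}-fips.{region}.amazonaws.com"
    return None

# e.g. _fips_endpoint("ssm", "us-gov-west-1")
# -> "https://ssm-fips.us-gov-west-1.amazonaws.com" when FIPS mode is enabled
```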

datadog_lambda/cold_start.py

Lines changed: 3 additions & 9 deletions
```diff
@@ -1,8 +1,9 @@
 import time
-import os
 from typing import List, Hashable
 import logging
 
+from datadog_lambda.config import config
+
 logger = logging.getLogger(__name__)
 
 _cold_start = True
@@ -86,14 +87,12 @@ def reset_node_stacks():
 
 def push_node(module_name, file_path):
     node = ImportNode(module_name, file_path, time.time_ns())
-    global import_stack
     if import_stack:
         import_stack[-1].children.append(node)
     import_stack.append(node)
 
 
 def pop_node(module_name):
-    global import_stack
     if not import_stack:
         return
     node = import_stack.pop()
@@ -102,7 +101,6 @@ def pop_node(module_name):
     end_time_ns = time.time_ns()
     node.end_time_ns = end_time_ns
     if not import_stack:  # import_stack empty, a root node has been found
-        global root_nodes
         root_nodes.append(node)
 
 
@@ -147,11 +145,7 @@ def wrapped_find_spec(*args, **kwargs):
 
 
 def initialize_cold_start_tracing():
-    if (
-        is_new_sandbox()
-        and os.environ.get("DD_TRACE_ENABLED", "true").lower() == "true"
-        and os.environ.get("DD_COLD_START_TRACING", "true").lower() == "true"
-    ):
+    if is_new_sandbox() and config.cold_start_tracing:
        from sys import meta_path
 
        for importer in meta_path:
```
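
The old guard checked `DD_TRACE_ENABLED` and `DD_COLD_START_TRACING` separately; that is now folded into the config layer, because `cold_start_tracing` is declared with `depends_on_tracing=True` and resolves to `False` whenever tracing is disabled. An illustrative check, run outside a real Lambda and using the test-only `config._reset()` helper added in this commit:

```python
import os
from datadog_lambda.config import config

os.environ["DD_COLD_START_TRACING"] = "true"
os.environ["DD_TRACE_ENABLED"] = "false"
config._reset()  # clear cached values so the env vars above are re-read

# Tracing is off, so cold start tracing is off too, even though
# DD_COLD_START_TRACING is set to "true".
assert config.cold_start_tracing is False
```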

datadog_lambda/config.py

Lines changed: 145 additions & 0 deletions
@@ -0,0 +1,145 @@

```python
# Unless explicitly stated otherwise all files in this repository are licensed
# under the Apache License Version 2.0.
# This product includes software developed at Datadog (https://www.datadoghq.com/).
# Copyright 2019 Datadog, Inc.

import logging
import os

logger = logging.getLogger(__name__)


def _get_env(key, default=None, cast=None, depends_on_tracing=False):
    @property
    def _getter(self):
        if not hasattr(self, prop_key):
            val = self._resolve_env(key, default, cast, depends_on_tracing)
            setattr(self, prop_key, val)
        return getattr(self, prop_key)

    prop_key = f"_config_{key}"
    return _getter


def as_bool(val):
    return val.lower() == "true" or val == "1"


def as_list(val):
    return [val.strip() for val in val.split(",") if val.strip()]


class Config:
    def _resolve_env(self, key, default=None, cast=None, depends_on_tracing=False):
        if depends_on_tracing and not self.trace_enabled:
            return False
        val = os.environ.get(key, default)
        if cast is not None:
            try:
                val = cast(val)
            except (ValueError, TypeError):
                msg = (
                    "Failed to cast environment variable '%s' with "
                    "value '%s' to type %s. Using default value '%s'."
                )
                logger.warning(msg, key, val, cast.__name__, default)
                val = default
        return val

    service = _get_env("DD_SERVICE")
    env = _get_env("DD_ENV")

    cold_start_tracing = _get_env(
        "DD_COLD_START_TRACING", "true", as_bool, depends_on_tracing=True
    )
    min_cold_start_trace_duration = _get_env("DD_MIN_COLD_START_DURATION", 3, int)
    cold_start_trace_skip_lib = _get_env(
        "DD_COLD_START_TRACE_SKIP_LIB",
        "ddtrace.internal.compat,ddtrace.filters",
        as_list,
    )

    capture_payload_max_depth = _get_env("DD_CAPTURE_LAMBDA_PAYLOAD_MAX_DEPTH", 10, int)
    capture_payload_enabled = _get_env("DD_CAPTURE_LAMBDA_PAYLOAD", "false", as_bool)

    trace_enabled = _get_env("DD_TRACE_ENABLED", "true", as_bool)
    make_inferred_span = _get_env(
        "DD_TRACE_MANAGED_SERVICES", "true", as_bool, depends_on_tracing=True
    )
    encode_authorizer_context = _get_env(
        "DD_ENCODE_AUTHORIZER_CONTEXT", "true", as_bool, depends_on_tracing=True
    )
    decode_authorizer_context = _get_env(
        "DD_DECODE_AUTHORIZER_CONTEXT", "true", as_bool, depends_on_tracing=True
    )
    add_span_pointers = _get_env("DD_BOTOCORE_ADD_SPAN_POINTERS", "true", as_bool)
    trace_extractor = _get_env("DD_TRACE_EXTRACTOR")

    enhanced_metrics_enabled = _get_env("DD_ENHANCED_METRICS", "true", as_bool)

    flush_in_thread = _get_env("DD_FLUSH_IN_THREAD", "false", as_bool)
    flush_to_log = _get_env("DD_FLUSH_TO_LOG", "false", as_bool)
    logs_injection = _get_env("DD_LOGS_INJECTION", "true", as_bool)
    merge_xray_traces = _get_env("DD_MERGE_XRAY_TRACES", "false", as_bool)

    telemetry_enabled = _get_env(
        "DD_INSTRUMENTATION_TELEMETRY_ENABLED",
        "false",
        as_bool,
        depends_on_tracing=True,
    )
    otel_enabled = _get_env("DD_TRACE_OTEL_ENABLED", "false", as_bool)
    profiling_enabled = _get_env("DD_PROFILING_ENABLED", "false", as_bool)
    llmobs_enabled = _get_env("DD_LLMOBS_ENABLED", "false", as_bool)
    exception_replay_enabled = _get_env("DD_EXCEPTION_REPLAY_ENABLED", "false", as_bool)
    data_streams_enabled = _get_env(
        "DD_DATA_STREAMS_ENABLED", "false", as_bool, depends_on_tracing=True
    )

    is_gov_region = _get_env("AWS_REGION", "", lambda x: x.startswith("us-gov-"))

    local_test = _get_env("DD_LOCAL_TEST", "false", as_bool)
    integration_test = _get_env("DD_INTEGRATION_TEST", "false", as_bool)

    aws_lambda_function_name = _get_env("AWS_LAMBDA_FUNCTION_NAME")

    @property
    def function_name(self):
        if not hasattr(self, "_config_function_name"):
            if self.aws_lambda_function_name is None:
                self._config_function_name = "function"
            else:
                self._config_function_name = self.aws_lambda_function_name
        return self._config_function_name

    @property
    def is_lambda_context(self):
        if not hasattr(self, "_config_is_lambda_context"):
            self._config_is_lambda_context = bool(self.aws_lambda_function_name)
        return self._config_is_lambda_context

    @property
    def fips_mode_enabled(self):
        if not hasattr(self, "_config_fips_mode_enabled"):
            self._config_fips_mode_enabled = (
                os.environ.get(
                    "DD_LAMBDA_FIPS_MODE",
                    "true" if self.is_gov_region else "false",
                ).lower()
                == "true"
            )
        return self._config_fips_mode_enabled

    def _reset(self):
        for attr in dir(self):
            if attr.startswith("_config_"):
                delattr(self, attr)


config = Config()

if config.is_gov_region or config.fips_mode_enabled:
    logger.debug(
        "Python Lambda Layer FIPS mode is %s.",
        "enabled" if config.fips_mode_enabled else "not enabled",
    )
```
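
Each `_get_env(...)` class attribute is a property that resolves its environment variable on first access, caches the result on the instance under a `_config_<KEY>` attribute, and returns the cached value afterwards; `_reset()` exists so tests can drop that cache. An illustrative session, run outside a real Lambda:

```python
import os
from datadog_lambda.config import config

os.environ["DD_MERGE_XRAY_TRACES"] = "true"
config._reset()
print(config.merge_xray_traces)   # True: read once, cast with as_bool, and cached

os.environ["DD_MERGE_XRAY_TRACES"] = "false"
print(config.merge_xray_traces)   # still True: the cached value is reused

config._reset()
print(config.merge_xray_traces)   # False: re-resolved after the cache is cleared
```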

datadog_lambda/fips.py

Lines changed: 0 additions & 19 deletions
This file was deleted.
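
The `fips_mode_enabled` flag this module exported now lives on the config object as the `Config.fips_mode_enabled` property shown above: it defaults to on in AWS GovCloud regions and can be overridden with `DD_LAMBDA_FIPS_MODE`. An illustrative check of the new behaviour, run outside a real Lambda:

```python
import os
from datadog_lambda.config import config

os.environ["AWS_REGION"] = "us-gov-east-1"
os.environ.pop("DD_LAMBDA_FIPS_MODE", None)
config._reset()
assert config.fips_mode_enabled       # defaults to enabled in us-gov-* regions

os.environ["DD_LAMBDA_FIPS_MODE"] = "false"
config._reset()
assert not config.fips_mode_enabled   # an explicit override wins
```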

datadog_lambda/metric.py

Lines changed: 5 additions & 12 deletions
```diff
@@ -5,14 +5,13 @@
 
 import enum
 import logging
-import os
 import time
 from datetime import datetime, timedelta
 
 import ujson as json
 
+from datadog_lambda.config import config
 from datadog_lambda.extension import should_use_extension
-from datadog_lambda.fips import fips_mode_enabled
 from datadog_lambda.tags import dd_lambda_layer_tag, get_enhanced_metrics_tags
 
 logger = logging.getLogger(__name__)
@@ -28,10 +27,10 @@ class MetricsHandler(enum.Enum):
 def _select_metrics_handler():
     if should_use_extension:
         return MetricsHandler.EXTENSION
-    if os.environ.get("DD_FLUSH_TO_LOG", "").lower() == "true":
+    if config.flush_to_log:
         return MetricsHandler.FORWARDER
 
-    if fips_mode_enabled:
+    if config.fips_mode_enabled:
         logger.debug(
             "With FIPS mode enabled, the Datadog API metrics handler is unavailable."
         )
@@ -58,14 +57,8 @@ def _select_metrics_handler():
     from datadog_lambda.api import init_api
     from datadog_lambda.thread_stats_writer import ThreadStatsWriter
 
-    flush_in_thread = os.environ.get("DD_FLUSH_IN_THREAD", "").lower() == "true"
     init_api()
-    lambda_stats = ThreadStatsWriter(flush_in_thread)
-
-
-enhanced_metrics_enabled = (
-    os.environ.get("DD_ENHANCED_METRICS", "true").lower() == "true"
-)
+    lambda_stats = ThreadStatsWriter(config.flush_in_thread)
 
 
 def lambda_metric(metric_name, value, timestamp=None, tags=None, force_async=False):
@@ -191,7 +184,7 @@ def submit_enhanced_metric(metric_name, lambda_context):
         metric_name (str): metric name w/o enhanced prefix i.e. "invocations" or "errors"
         lambda_context (object): Lambda context dict passed to the function by AWS
     """
-    if not enhanced_metrics_enabled:
+    if not config.enhanced_metrics_enabled:
         logger.debug(
             "Not submitting enhanced metric %s because enhanced metrics are disabled",
             metric_name,
```
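
With this change `_select_metrics_handler()` no longer parses the environment itself: the extension check comes first, then `config.flush_to_log` picks the forwarder, and `config.fips_mode_enabled` rules out the Datadog API handler. A simplified, hypothetical mirror of that visible ordering (`pick_handler_sketch` is not part of the module, and the branches after the FIPS check are not shown in this diff):

```python
from datadog_lambda.config import config
from datadog_lambda.extension import should_use_extension

def pick_handler_sketch():
    # Hypothetical mirror of the ordering visible in the hunk above; the real
    # function returns MetricsHandler enum members.
    if should_use_extension:
        return "extension"
    if config.flush_to_log:
        return "forwarder"
    if config.fips_mode_enabled:
        return "datadog-api handler unavailable (FIPS mode)"
    return "fall through to the remaining handlers (not shown in this hunk)"
```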

datadog_lambda/patch.py

Lines changed: 4 additions & 8 deletions
```diff
@@ -3,7 +3,6 @@
 # This product includes software developed at Datadog (https://www.datadoghq.com/).
 # Copyright 2019 Datadog, Inc.
 
-import os
 import sys
 import logging
 import zlib
@@ -13,10 +12,8 @@
 from wrapt.importer import when_imported
 from ddtrace import patch_all as patch_all_dd
 
-from datadog_lambda.tracing import (
-    get_dd_trace_context,
-    dd_tracing_enabled,
-)
+from datadog_lambda.config import config
+from datadog_lambda.tracing import get_dd_trace_context
 from collections.abc import MutableMapping
 
 logger = logging.getLogger(__name__)
@@ -32,7 +29,7 @@ def patch_all():
     """
     _patch_for_integration_tests()
 
-    if dd_tracing_enabled:
+    if config.trace_enabled:
         patch_all_dd()
     else:
         _patch_http()
@@ -44,8 +41,7 @@ def _patch_for_integration_tests():
     Patch `requests` to log the outgoing requests for integration tests.
     """
     global _integration_tests_patched
-    is_in_tests = os.environ.get("DD_INTEGRATION_TEST", "false").lower() == "true"
-    if not _integration_tests_patched and is_in_tests:
+    if not _integration_tests_patched and config.integration_test:
         wrap("requests", "Session.send", _log_request)
         _integration_tests_patched = True
 
```