Drop samples with duplicate timestamps, presumably due to truncation · sxlstevengit/client_python@5c5c3e2
Commit 5c5c3e2

Drop samples with duplicate timestamps, presumably due to truncation

Signed-off-by: Brian Brazil <brian.brazil@robustperception.io>

1 parent f4c08cd · commit 5c5c3e2
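For context: the OpenMetrics parser stores timestamps at nanosecond resolution (`Timestamp(sec, nsec)` from `prometheus_client.core`), so two exposition lines whose timestamps differ only below one nanosecond parse to the same value and yield duplicate samples for the same series. A minimal sketch of the effect; `truncate_to_ns` is a hypothetical helper written for this note, not the parser's actual timestamp-parsing code:

```python
# Hypothetical helper illustrating nanosecond truncation; the real parser
# does the equivalent when it builds a Timestamp from the exposition text.
from prometheus_client.core import Timestamp


def truncate_to_ns(raw):
    """Parse a decimal timestamp string, keeping at most 9 fractional digits."""
    seconds, _, fraction = raw.partition('.')
    nsec = int((fraction + '0' * 9)[:9])  # pad, then drop sub-ns digits
    return Timestamp(int(seconds), nsec)


# "0.0000000001" is below 1ns, so it collapses onto "0.0000000000":
assert truncate_to_ns("0.0000000000") == truncate_to_ns("0.0000000001")
# "0.0000000010" is exactly 1ns and stays distinct:
assert truncate_to_ns("0.0000000010") == Timestamp(0, 1)
```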

2 files changed: 28 insertions(+), 5 deletions(-)


prometheus_client/openmetrics/parser.py

Lines changed: 12 additions & 4 deletions
```diff
@@ -317,7 +317,6 @@ def build_metric(name, documentation, typ, unit, samples):
             _check_histogram(samples, name)
         metric = core.Metric(name, documentation, typ, unit)
         # TODO: check labelvalues are valid utf8
-        # TODO: Check for duplicate samples
         metric.samples = samples
         return metric

@@ -347,6 +346,7 @@ def build_metric(name, documentation, typ, unit, samples):
                 group = None
                 seen_groups = set()
                 group_timestamp = None
+                group_timestamp_samples = set()
                 samples = []
                 allowed_names = [parts[2]]

@@ -387,13 +387,12 @@ def build_metric(name, documentation, typ, unit, samples):
                 documentation = None
                 unit = None
                 typ = 'unknown'
-                samples = [sample]
+                samples = []
                 group = None
                 group_timestamp = None
+                group_timestamp_samples = set()
                 seen_groups = set()
                 allowed_names = [sample.name]
-            else:
-                samples.append(sample)

             if typ == 'stateset' and name not in sample.labels:
                 raise ValueError("Stateset missing label: " + line)

@@ -412,6 +411,15 @@ def build_metric(name, documentation, typ, unit, samples):
                     raise ValueError("Mix of timestamp presence within a group: " + line)
                 if group_timestamp is not None and group_timestamp > sample.timestamp and typ != 'info':
                     raise ValueError("Timestamps went backwards within a group: " + line)
+            else:
+                group_timestamp_samples = set()
+
+            series_id = (sample.name, tuple(sorted(sample.labels.items())))
+            if sample.timestamp != group_timestamp or series_id not in group_timestamp_samples:
+                # Not a duplicate due to timestamp truncation.
+                samples.append(sample)
+                group_timestamp_samples.add(series_id)
+
             group = g
             group_timestamp = sample.timestamp
             seen_groups.add(g)
```
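Pulled out of the surrounding parser state, the new rule is: a sample is kept unless the same series (name plus sorted label pairs) has already been seen at the current group timestamp, and the seen-set resets whenever the timestamp moves. A standalone sketch of that rule, assuming samples shaped like `prometheus_client.samples.Sample`; the `dedup_truncated` helper is illustrative, not part of this commit:

```python
from collections import namedtuple

# Mirrors the Sample fields the dedup logic actually reads.
Sample = namedtuple('Sample', ['name', 'labels', 'value', 'timestamp'])


def dedup_truncated(samples):
    """Keep the first sample per series at each (truncated) timestamp."""
    group_timestamp = None
    seen_series = set()
    out = []
    for sample in samples:
        if sample.timestamp != group_timestamp:
            seen_series = set()  # new timestamp: forget previously seen series
        series_id = (sample.name, tuple(sorted(sample.labels.items())))
        if series_id not in seen_series:
            out.append(sample)  # first occurrence at this timestamp wins
        seen_series.add(series_id)
        group_timestamp = sample.timestamp
    return out


dups = [
    Sample('a', {'foo': 'bar'}, 1, 0.0),
    Sample('a', {'foo': 'bar'}, 2, 0.0),   # same series, same timestamp: dropped
    Sample('a', {'foo': 'bar'}, 3, 1e-9),  # timestamp moved on: kept
]
assert [s.value for s in dedup_truncated(dups)] == [1, 3]
```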

tests/openmetrics/test_parser.py

Lines changed: 16 additions & 1 deletion
```diff
@@ -185,6 +185,22 @@ def test_simple_stateset(self):
 """)
         self.assertEqual([StateSetMetricFamily("a", "help", {'foo': True, 'bar': False})], list(families))

+    def test_duplicate_timestamps(self):
+        families = text_string_to_metric_families("""# TYPE a gauge
+# HELP a help
+a{a="1",foo="bar"} 1 0.0000000000
+a{a="1",foo="bar"} 2 0.0000000001
+a{a="1",foo="bar"} 3 0.0000000010
+a{a="2",foo="bar"} 4 0.0000000000
+a{a="2",foo="bar"} 5 0.0000000001
+# EOF
+""")
+        imf = GaugeMetricFamily("a", "help")
+        imf.add_sample("a", {"a": "1", "foo": "bar"}, 1, Timestamp(0, 0))
+        imf.add_sample("a", {"a": "1", "foo": "bar"}, 3, Timestamp(0, 1))
+        imf.add_sample("a", {"a": "2", "foo": "bar"}, 4, Timestamp(0, 0))
+        self.assertEqual([imf], list(families))
+
     def test_no_metadata(self):
         families = text_string_to_metric_families("""a 1
 # EOF

@@ -533,7 +549,6 @@ def test_invalid_input(self):
             ('# TYPE a gaugehistogram\na_gsum 1\n# EOF\n'),
             ('# TYPE a histogram\na_count 1\na_bucket{le="+Inf"} 0\n# EOF\n'),
             ('# TYPE a histogram\na_bucket{le="+Inf"} 0\na_count 1\n# EOF\n'),
-            ('# TYPE a histogram\na_bucket{le="+Inf"} 0\na_bucket{le="+Inf"} 0\n# EOF\n'),
             ('# TYPE a histogram\na_bucket{le="2"} 0\na_bucket{le="1"} 0\na_bucket{le="+Inf"} 0\n# EOF\n'),
             ('# TYPE a histogram\na_bucket{le="1"} 1\na_bucket{le="2"} 1\na_bucket{le="+Inf"} 0\n# EOF\n'),
             # Bad grouping or ordering.
```
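The new test sits exactly on the nanosecond boundary: `0.0000000001` truncates to `Timestamp(0, 0)` and is dropped as a duplicate of the first sample in each series, while `0.0000000010` is exactly one nanosecond and survives as `Timestamp(0, 1)`. The removed invalid-input case reflects the same change: a repeated `+Inf` bucket is now deduplicated rather than rejected. A quick end-to-end check through the public parser, assuming a client_python build with this commit applied:

```python
from prometheus_client.openmetrics.parser import text_string_to_metric_families

exposition = """# TYPE a gauge
# HELP a help
a{a="1",foo="bar"} 1 0.0000000000
a{a="1",foo="bar"} 2 0.0000000001
# EOF
"""

for family in text_string_to_metric_families(exposition):
    for sample in family.samples:
        print(sample.name, sample.labels, sample.value, sample.timestamp)
# Prints a single sample with value 1.0: the second line truncates to the
# same Timestamp(0, 0) and is dropped as a duplicate.
```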
