8000 [PBCKP-423] test_archive_push_sanity: wait logs with tail_file · postgrespro/pg_probackup@22bdbb3 · GitHub
[go: up one dir, main page]

Skip to content

Commit 22bdbb3

Browse files
committed
[PBCKP-423] test_archive_push_sanity: wait logs with tail_file
(and some other test as well).
1 parent 8f4e7d6 commit 22bdbb3

File tree

3 files changed

+123
-52
lines changed

3 files changed

+123
-52
lines changed

tests/archive_test.py

Lines changed: 44 additions & 51 deletions
Original file line numberDiff line numberDiff line change
@@ -3,6 +3,7 @@
33
import gzip
44
import unittest
55
from .helpers.ptrack_helpers import ProbackupTest, ProbackupException, GdbException
6+
from .helpers.data_helpers import tail_file
67
from datetime import datetime, timedelta
78
import subprocess
89
from sys import exit
@@ -383,26 +384,31 @@ def test_archive_push_file_exists(self):
383384
self.switch_wal_segment(node)
384385
sleep(1)
385386

386-
with open(log_file, 'r') as f:
387-
log_content = f.read()
387+
log = tail_file(log_file, linetimeout=30, totaltimeout=120,
388+
collect=True)
389+
log.wait(contains = 'The failed archive command was')
390+
388391
self.assertIn(
389392
'LOG: archive command failed with exit code 1',
390-
log_content)
393+
log.content)
391394

392395
self.assertIn(
393396
'DETAIL: The failed archive command was:',
394-
log_content)
397+
log.content)
395398

396399
self.assertIn(
397400
'pg_probackup archive-push WAL file',
398-
log_content)
401+
log.content)
399402

400403
self.assertIn(
401404
'WAL file already exists in archive with different checksum',
402-
log_content)
405+
log.content)
403406

404407
self.assertNotIn(
405-
'pg_probackup archive-push completed successfully', log_content)
408+
'pg_probackup archive-push completed successfully', log.content)
409+
410+
# btw check that console coloring codes are not slipped into log file
411+
self.assertNotIn('[0m', log.content)
406412

407413
if self.get_version(node) < 100000:
408414
wal_src = os.path.join(
@@ -419,19 +425,9 @@ def test_archive_push_file_exists(self):
419425
shutil.copyfile(wal_src, file)
420426

421427
self.switch_wal_segment(node)
422-
sleep(5)
423-
424-
with open(log_file, 'r') as f:
425-
log_content = f.read()
426-
427-
self.assertIn(
428-
'pg_probackup archive-push completed successfully',
429-
log_content)
430-
431-
# btw check that console coloring codes are not slipped into log file
432-
self.assertNotIn('[0m', log_content)
433428

434-
print(log_content)
429+
log.stop_collect()
430+
log.wait(contains = 'pg_probackup archive-push completed successfully')
435431

436432
# @unittest.skip("skip")
437433
def test_archive_push_file_exists_overwrite(self):
@@ -471,39 +467,35 @@ def test_archive_push_file_exists_overwrite(self):
471467
self.switch_wal_segment(node)
472468
sleep(1)
473469

474-
with open(log_file, 'r') as f:
475-
log_content = f.read()
470+
log = tail_file(log_file, linetimeout=30, collect=True)
471+
log.wait(contains = 'The failed archive command was')
476472

477473
self.assertIn(
478-
'LOG: archive command failed with exit code 1', log_content)
474+
'LOG: archive command failed with exit code 1', log.content)
479475
self.assertIn(
480-
'DETAIL: The failed archive command was:', log_content)
476+
'DETAIL: The failed archive command was:', log.content)
481477
self.assertIn(
482-
'pg_probackup archive-push WAL file', log_content)
478+
'pg_probackup archive-push WAL file', log.content)
483479
self.assertNotIn(
484480
'WAL file already exists in archive with '
485-
'different checksum, overwriting', log_content)
481+
'different checksum, overwriting', log.content)
486482
self.assertIn(
487483
'WAL file already exists in archive with '
488-
'different checksum', log_content)
484+
'different checksum', log.content)
489485

490486
self.assertNotIn(
491-
'pg_probackup archive-push completed successfully', log_content)
487+
'pg_probackup archive-push completed successfully', log.content)
492488

493489
self.set_archiving(backup_dir, 'node', node, overwrite=True)
494490
node.reload()
495491
self.switch_wal_segment(node)
496-
sleep(5)
497492

498-
with open(log_file, 'r') as f:
499-
log_content = f.read()
500-
self.assertTrue(
501-
'pg_probackup archive-push completed successfully' in log_content,
502-
'Expecting messages about successfull execution archive_command')
493+
log.drop_content()
494+
log.wait(contains = 'pg_probackup archive-push completed successfully')
503495

504496
self.assertIn(
505497
'WAL file already exists in archive with '
506-
'different checksum, overwriting', log_content)
498+
'different checksum, overwriting', log.content)
507499

508500
# @unittest.skip("skip")
509501
def test_archive_push_partial_file_exists(self):
@@ -2049,14 +2041,22 @@ def test_archive_push_sanity(self):
20492041
replica.promote()
20502042
replica.pgbench_init(scale=10)
20512043

2052-
with open(os.path.join(replica.logs_dir, 'postgresql.log'), 'r') as f:
2053-
replica_log_content = f.read()
2044+
log = tail_file(os.path.join(replica.logs_dir, 'postgresql.log'),
2045+
collect=True)
2046+
log.wait(regex=r"pushing file.*history")
2047+
log.wait(contains='archive-push completed successfully')
2048+
log.wait(regex=r"pushing file.*partial")
2049+
log.wait(contains='archive-push completed successfully')
20542050

20552051
# make sure that .partial file is not compressed
2056-
self.assertNotIn('.partial.gz', replica_log_content)
2052+
self.assertNotIn('.partial.gz', log.content)
20572053
# make sure that .history file is not compressed
2058-
self.assertNotIn('.history.gz', replica_log_content)
2059-
self.assertNotIn('WARNING', replica_log_content)
2054+
self.assertNotIn('.history.gz', log.content)
2055+
2056+
replica.stop()
2057+
log.wait_shutdown()
2058+
2059+
self.assertNotIn('WARNING', log.content)
20602060

20612061
output = self.show_archive(
20622062
backup_dir, 'node', as_json=False, as_text=True,
@@ -2440,18 +2440,11 @@ def test_archive_get_prefetch_corruption(self):
24402440
os.remove(os.path.join(replica.logs_dir, 'postgresql.log'))
24412441
replica.slow_start(replica=True)
24422442

2443-
sleep(60)
2444-
2445-
with open(os.path.join(replica.logs_dir, 'postgresql.log'), 'r') as f:
2446-
postgres_log_content = f.read()
2447-
2448-
self.assertIn(
2449-
'Prefetched WAL segment {0} is invalid, cannot use it'.format(filename),
2450-
postgres_log_content)
2451-
2452-
self.assertIn(
2453-
'LOG: restored log file "{0}" from archive'.format(filename),
2454-
postgres_log_content)
2443+
prefetch_line = 'Prefetched WAL segment {0} is invalid, cannot use it'.format(filename)
2444+
restored_line = 'LOG: restored log file "{0}" from archive'.format(filename)
2445+
tailer = tail_file(os.path.join(replica.logs_dir, 'postgresql.log'))
2446+
tailer.wait(contains=prefetch_line)
2447+
tailer.wait(contains=restored_line)
24552448

24562449
# @unittest.skip("skip")
24572450
def test_archive_show_partial_files_handling(self):

tests/helpers/__init__.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,4 @@
1-
__all__ = ['ptrack_helpers', 'cfs_helpers', 'expected_errors']
1+
__all__ = ['ptrack_helpers', 'cfs_helpers', 'data_helpers']
22

33
import unittest
44

tests/helpers/data_helpers.py

Lines changed: 78 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,78 @@
1+
import re
2+
import unittest
3+
import functools
4+
import time
5+
6+
def _tail_file(file, linetimeout, totaltimeout):
7+
start = time.time()
8+
with open(file, 'r') as f:
9+
waits = 0
10+
while waits < linetimeout:
11+
line = f.readline()
12+
if line == '':
13+
waits += 1
14+
time.sleep(1)
15+
continue
16+
waits = 0
17+
yield line
18+
if time.time() - start > totaltimeout:
19+
raise TimeoutError("total timeout tailing %s" % (file,))
20+
else:
21+
raise TimeoutError("line timeout tailing %s" % (file,))
22+
23+
24+
class tail_file(object):  # snake case to imitate function
    """Tail a log file, optionally collecting its content for assertions.

    Thin object wrapper around the _tail_file generator: iterate it line by
    line, or call wait() to block until an expected message appears.  With
    collect=True every line seen is kept and exposed via .content.
    """

    def __init__(self, filename, *, linetimeout=10, totaltimeout=60, collect=False):
        self.filename = filename
        self.tailer = _tail_file(filename, linetimeout, totaltimeout)
        self.collect = collect   # when True, remember every line in self.lines
        self.lines = []
        self._content = None     # cached "".join(self.lines); None means stale

    def __iter__(self):
        return self

    def __next__(self):
        line = next(self.tailer)
        if self.collect:
            self.lines.append(line)
            self._content = None  # invalidate cache on new data
        return line

    @property
    def content(self):
        """All collected lines joined into one string (requires collect=True)."""
        if not self.collect:
            # Plain-message form: the name=/obj= keyword arguments of
            # AttributeError exist only on Python 3.10+, so the keyword form
            # raised TypeError instead of AttributeError on older interpreters.
            raise AttributeError("content collection is not enabled")
        if self._content is None:
            # Compare against None, not truthiness: empty collected content
            # ("") is a valid cached value and must not trigger a re-join.
            self._content = "".join(self.lines)
        return self._content

    def drop_content(self):
        """Forget everything collected so far; collection stays enabled."""
        self.lines.clear()
        self._content = None

    def stop_collect(self):
        """Drop collected content and disable any further collection."""
        self.drop_content()
        self.collect = False

    def wait(self, *, contains=None, regex=None):
        """Consume lines until one contains *contains* or matches *regex*.

        Exactly one of the two keyword arguments must be given.  A tail
        timeout is converted into a test failure naming the expected text
        and the tailed file.
        """
        # Explicit validation instead of `assert`: asserts vanish under -O.
        if (contains is None) == (regex is None):
            raise ValueError("pass exactly one of 'contains' or 'regex'")
        try:
            for line in self:
                if contains is not None and contains in line:
                    break
                if regex is not None and re.search(regex, line):
                    break
        except TimeoutError:
            msg = "Didn't find expected "
            if contains is not None:
                msg += repr(contains)
            else:
                msg += f"/{regex}/"
            msg += f" in {self.filename}"
            raise unittest.TestCase.failureException(msg)

    def wait_shutdown(self):
        """Block until PostgreSQL logs its final shutdown message."""
        self.wait(contains='database system is shut down')

0 commit comments

Comments
 (0)
0