bpo-38119: Fix shmem resource tracking by applio · Pull Request #15989 · python/cpython · GitHub

bpo-38119: Fix shmem resource tracking #15989

Closed
applio wants to merge 3 commits
Changes from 1 commit

Eliminated premature destruction of shared memory segments by resource tracker.
applio committed Sep 11, 2019
commit d0789e253a117df2c90778a38d17bf2e1c749ad8
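
For context (this sketch is not part of the diff; the name "demo_segment" and the two-script split are made up): the premature destruction being eliminated shows up when a segment is meant to outlive the process that created it. Before this change, the creating process's resource_tracker unlinked the segment at interpreter shutdown, after warning about "leaked shared_memory objects", so a later, unrelated process could no longer attach.

    # writer.py -- hypothetical repro sketch, not taken from the PR.
    from multiprocessing import shared_memory

    shm = shared_memory.SharedMemory(name="demo_segment", create=True, size=64)
    shm.buf[:5] = b"hello"
    shm.close()
    # The segment is intended to persist past this process, but the
    # resource_tracker used to unlink it during interpreter shutdown.

    # reader.py -- run afterwards in a separate interpreter.
    from multiprocessing import shared_memory

    shm = shared_memory.SharedMemory(name="demo_segment")  # raised FileNotFoundError before this change
    print(bytes(shm.buf[:5]))
    shm.close()
    shm.unlink()  # explicit removal is now the caller's responsibility
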
8 changes: 0 additions & 8 deletions Lib/multiprocessing/managers.py
@@ -1309,14 +1309,6 @@ class SharedMemoryManager(BaseManager):
     _Server = SharedMemoryServer
 
     def __init__(self, *args, **kwargs):
-        if os.name == "posix":
-            # bpo-36867: Ensure the resource_tracker is running before
-            # launching the manager process, so that concurrent
-            # shared_memory manipulation both in the manager and in the
-            # current process does not create two resource_tracker
-            # processes.
-            from . import resource_tracker
-            resource_tracker.ensure_running()
         BaseManager.__init__(self, *args, **kwargs)
         util.debug(f"{self.__class__.__name__} created by pid {getpid()}")
 
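The ensure_running() call removed above appears to have been purely about the resource_tracker (avoiding two tracker processes, per bpo-36867); SharedMemoryManager's own cleanup path is separate, since segments created through the manager are tracked by the manager's server process and released when it shuts down. A minimal usage sketch, not taken from the diff:

    from multiprocessing.managers import SharedMemoryManager

    # Segments created through the manager are released when the manager
    # shuts down, i.e. when the with-block exits.
    with SharedMemoryManager() as smm:
        shm = smm.SharedMemory(size=128)
        shm.buf[:3] = b"abc"
        sl = smm.ShareableList(["x", 1, 2.5])
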
1 change: 0 additions & 1 deletion Lib/multiprocessing/resource_tracker.py
@@ -39,7 +39,6 @@
 
     _CLEANUP_FUNCS.update({
         'semaphore': _multiprocessing.sem_unlink,
-        'shared_memory': _posixshmem.shm_unlink,
     })
 
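With this entry and the register() calls below removed, the tracker never sees shared memory names at all; it keeps reclaiming leaked semaphores as before. Conceptually, its shutdown cleanup works along these lines (a paraphrase for illustration, not the actual resource_tracker code; cleanup_funcs and leaked are stand-ins):

    import _multiprocessing  # provides sem_unlink on POSIX

    # Stand-in for the tracker's table of cleanup functions after this change.
    cleanup_funcs = {
        'semaphore': _multiprocessing.sem_unlink,
        # no 'shared_memory' entry anymore
    }
    # Stand-in for names still registered when the tracker shuts down.
    leaked = {'semaphore': ['/mp-demo'], 'shared_memory': ['psm_demo']}

    for rtype, names in leaked.items():
        cleanup = cleanup_funcs.get(rtype)
        if cleanup is None:
            continue  # shared_memory names are no longer unlinked here
        for name in names:
            try:
                cleanup(name)
            except Exception:
                pass  # cleanup is best-effort
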
5 changes: 0 additions & 5 deletions Lib/multiprocessing/shared_memory.py
@@ -113,9 +113,6 @@ def __init__(self, name=None, create=False, size=0):
                 self.unlink()
                 raise
 
-            from .resource_tracker import register
-            register(self._name, "shared_memory")
-
         else:
 
             # Windows Named Shared Memory
@@ -234,9 +231,7 @@ def unlink(self):
         called once (and only once) across all processes which have access
         to the shared memory block."""
         if _USE_POSIX and self._name:
-            from .resource_tracker import unregister
             _posixshmem.shm_unlink(self._name)
-            unregister(self._name, "shared_memory")
 
 
 _encoding = "utf8"
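
With register()/unregister() gone, unlink() now maps directly onto shm_unlink and nothing tracks the name on the caller's behalf, so the docstring's "once (and only once)" rule is entirely the application's job. A small illustration (hypothetical segment name; both handles live in one process only for brevity):

    from multiprocessing import shared_memory

    # One side creates the segment and is the one that eventually unlinks it.
    owner = shared_memory.SharedMemory(name="results_buf", create=True, size=256)

    # Other users (normally in other processes) attach, use, and only close.
    user = shared_memory.SharedMemory(name="results_buf")
    user.buf[:4] = b"data"
    user.close()   # drops this handle's mapping; the named segment still exists

    owner.close()  # the owner drops its own mapping too
    owner.unlink() # exactly one unlink() removes the named segment
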
43 changes: 0 additions & 43 deletions Lib/test/_test_multiprocessing.py
@@ -4012,49 +4012,6 @@ def test_shared_memory_ShareableList_pickling(self):
         deserialized_sl.shm.close()
         sl.shm.close()
 
-    def test_shared_memory_cleaned_after_process_termination(self):
-        cmd = '''if 1:
-            import os, time, sys
-            from multiprocessing import shared_memory
-
-            # Create a shared_memory segment, and send the segment name
-            sm = shared_memory.SharedMemory(create=True, size=10)
-            sys.stdout.write(sm.name + '\\n')
-            sys.stdout.flush()
-            time.sleep(100)
-        '''
-        with subprocess.Popen([sys.executable, '-E', '-c', cmd],
-                              stdout=subprocess.PIPE,
-                              stderr=subprocess.PIPE) as p:
-            name = p.stdout.readline().strip().decode()
-
-            # killing abruptly processes holding reference to a shared memory
-            # segment should not leak the given memory segment.
-            p.terminate()
-            p.wait()
-
-            deadline = time.monotonic() + 60
-            t = 0.1
-            while time.monotonic() < deadline:
-                time.sleep(t)
-                t = min(t*2, 5)
-                try:
-                    smm = shared_memory.SharedMemory(name, create=False)
-                except FileNotFoundError:
-                    break
-            else:
-                raise AssertionError("A SharedMemory segment was leaked after"
-                                     " a process was abruptly terminated.")
-
-            if os.name == 'posix':
-                # A warning was emitted by the subprocess' own
-                # resource_tracker (on Windows, shared memory segments
-                # are released automatically by the OS).
-                err = p.stderr.read().decode()
-                self.assertIn(
-                    "resource_tracker: There appear to be 1 leaked "
-                    "shared_memory objects to clean up at shutdown", err)
-
 #
 #
 #
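
The deleted test asserted that the tracker reclaims segments after a process is killed abruptly. Without that behavior, a segment left behind by a crashed process persists until something removes it, which can be done by hand along these lines (hypothetical helper and name, assuming a POSIX system):

    from multiprocessing import shared_memory

    def remove_if_present(name):
        # Best-effort removal of a segment a crashed process may have
        # left behind; harmless if it no longer exists.
        try:
            leftover = shared_memory.SharedMemory(name=name)
        except FileNotFoundError:
            return False
        leftover.close()
        leftover.unlink()
        return True

    remove_if_present("demo_segment")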