gh-131757: allow lru_cache functions to execute concurrently (#131758) · python/cpython@4c12a2d

Commit 4c12a2d

gh-131757: allow lru_cache functions to execute concurrently (#131758)
1 parent 45c447b commit 4c12a2d

3 files changed: +94 -42 lines changed

Include/internal/pycore_pyatomic_ft_wrappers.h

Lines changed: 3 additions & 0 deletions
@@ -109,6 +109,8 @@ extern "C" {
         _Py_atomic_store_ullong_relaxed(&value, new_value)
 #define FT_ATOMIC_LOAD_ULLONG_RELAXED(value) \
         _Py_atomic_load_ullong_relaxed(&value)
+#define FT_ATOMIC_ADD_SSIZE(value, new_value) \
+        (void)_Py_atomic_add_ssize(&value, new_value)
 
 #else
 #define FT_ATOMIC_LOAD_PTR(value) value
@@ -156,6 +158,7 @@ extern "C" {
 #define FT_ATOMIC_STORE_LLONG_RELAXED(value, new_value) value = new_value
 #define FT_ATOMIC_LOAD_ULLONG_RELAXED(value) value
 #define FT_ATOMIC_STORE_ULLONG_RELAXED(value, new_value) value = new_value
+#define FT_ATOMIC_ADD_SSIZE(value, new_value) (void)(value += new_value)
 
 #endif

Lines changed: 1 addition & 0 deletions
@@ -0,0 +1 @@
+Make :func:`functools.lru_cache` call the cached function unlocked to allow concurrency.
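
The user-visible effect is easiest to see from Python. The block below is a minimal sketch, not part of the commit; cpu_bound is a hypothetical stand-in for an expensive cached callable. On the free-threaded build, distinct cache misses can now execute the wrapped function in parallel, because the wrapper no longer holds the per-cache critical section around the call (see the removal in lru_cache_call further down).

# Minimal sketch (illustrative only): concurrent misses through one lru_cache.
# On the free-threaded build the cpu_bound() calls below may overlap; before
# this commit the whole wrapper ran inside the per-cache critical section.
import functools
import threading

@functools.lru_cache(maxsize=128)
def cpu_bound(n):
    # Hypothetical CPU-bound work standing in for an expensive function.
    return sum(i * i for i in range(n))

threads = [threading.Thread(target=cpu_bound, args=(1_000_000 + i,))
           for i in range(4)]
for t in threads:
    t.start()
for t in threads:
    t.join()
print(cpu_bound.cache_info())   # hits and misses are still tracked, now atomically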

Modules/_functoolsmodule.c

Lines changed: 90 additions & 42 deletions
@@ -4,6 +4,7 @@
 #include "pycore_long.h"          // _PyLong_GetZero()
 #include "pycore_moduleobject.h"  // _PyModule_GetState()
 #include "pycore_object.h"        // _PyObject_GC_TRACK
+#include "pycore_pyatomic_ft_wrappers.h"
 #include "pycore_pystate.h"       // _PyThreadState_GET()
 #include "pycore_tuple.h"         // _PyTuple_ITEMS()

@@ -40,7 +41,6 @@ get_functools_state(PyObject *module)
     return (_functools_state *)state;
 }
 
-
 /* partial object **********************************************************/
 

@@ -1016,7 +1016,7 @@ _functools_reduce_impl(PyObject *module, PyObject *func, PyObject *seq,
 
     if (result == NULL)
         PyErr_SetString(PyExc_TypeError,
-            "reduce() of empty iterable with no initial value");
+                        "reduce() of empty iterable with no initial value");
 
     Py_DECREF(it);
     return result;
@@ -1173,7 +1173,7 @@ uncached_lru_cache_wrapper(lru_cache_object *self, PyObject *args, PyObject *kwd
 {
     PyObject *result;
 
-    self->misses++;
+    FT_ATOMIC_ADD_SSIZE(self->misses, 1);
     result = PyObject_Call(self->func, args, kwds);
     if (!result)
         return NULL;
@@ -1193,18 +1193,17 @@ infinite_lru_cache_wrapper(lru_cache_object *self, PyObject *args, PyObject *kwd
         Py_DECREF(key);
         return NULL;
     }
-    result = _PyDict_GetItem_KnownHash(self->cache, key, hash);
-    if (result) {
-        Py_INCREF(result);
-        self->hits++;
+    int res = _PyDict_GetItemRef_KnownHash((PyDictObject *)self->cache, key, hash, &result);
+    if (res > 0) {
+        FT_ATOMIC_ADD_SSIZE(self->hits, 1);
         Py_DECREF(key);
         return result;
     }
-    if (PyErr_Occurred()) {
+    if (res < 0) {
         Py_DECREF(key);
         return NULL;
     }
-    self->misses++;
+    FT_ATOMIC_ADD_SSIZE(self->misses, 1);
     result = PyObject_Call(self->func, args, kwds);
     if (!result) {
         Py_DECREF(key);
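
For the unbounded cache (maxsize=None), the hunk above swaps the borrowed-reference lookup for _PyDict_GetItemRef_KnownHash and makes the hit/miss counters atomic; no per-cache lock is held around either the lookup or the call. A rough Python analogue of that control flow, purely illustrative and not the actual implementation, looks like this:

# Rough Python analogue (illustrative only) of the unbounded path: a dict
# lookup, then an unlocked call on a miss, then a plain store.  If two threads
# miss on the same key at once, both call func() and the later store wins.
def make_unbounded_cache(func):
    cache = {}
    def wrapper(*args):
        key = args                # the real code builds a richer key (kwds, typed)
        try:
            return cache[key]     # hit: a single dict read
        except KeyError:
            pass
        result = func(*args)      # miss: user code runs with no cache lock held
        cache[key] = result       # racy but safe: last writer wins
        return result
    return wrapper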
@@ -1279,50 +1278,65 @@ lru_cache_prepend_link(lru_cache_object *self, lru_list_elem *link)
    so that we know the cache is a consistent state.
 */
 
-static PyObject *
-bounded_lru_cache_wrapper(lru_cache_object *self, PyObject *args, PyObject *kwds)
+static int
+bounded_lru_cache_get_lock_held(lru_cache_object *self, PyObject *args, PyObject *kwds,
+                                PyObject **result, PyObject **key, Py_hash_t *hash)
 {
+    _Py_CRITICAL_SECTION_ASSERT_OBJECT_LOCKED(self);
     lru_list_elem *link;
-    PyObject *key, *result, *testresult;
-    Py_hash_t hash;
 
-    key = lru_cache_make_key(self->kwd_mark, args, kwds, self->typed);
-    if (!key)
-        return NULL;
-    hash = PyObject_Hash(key);
-    if (hash == -1) {
-        Py_DECREF(key);
-        return NULL;
+    PyObject *key_ = *key = lru_cache_make_key(self->kwd_mark, args, kwds, self->typed);
+    if (!key_)
+        return -1;
+    Py_hash_t hash_ = *hash = PyObject_Hash(key_);
+    if (hash_ == -1) {
+        Py_DECREF(key_);  /* dead reference left in *key, is not used */
+        return -1;
     }
-    link = (lru_list_elem *)_PyDict_GetItem_KnownHash(self->cache, key, hash);
-    if (link != NULL) {
+    int res = _PyDict_GetItemRef_KnownHash_LockHeld((PyDictObject *)self->cache, key_, hash_,
+                                                    (PyObject **)&link);
+    if (res > 0) {
         lru_cache_extract_link(link);
         lru_cache_append_link(self, link);
-        result = link->result;
-        self->hits++;
-        Py_INCREF(result);
-        Py_DECREF(key);
-        return result;
+        *result = link->result;
+        FT_ATOMIC_ADD_SSIZE(self->hits, 1);
+        Py_INCREF(link->result);
+        Py_DECREF(link);
+        Py_DECREF(key_);
+        return 1;
     }
-    if (PyErr_Occurred()) {
-        Py_DECREF(key);
-        return NULL;
+    if (res < 0) {
+        Py_DECREF(key_);
+        return -1;
     }
-    self->misses++;
-    result = PyObject_Call(self->func, args, kwds);
+    FT_ATOMIC_ADD_SSIZE(self->misses, 1);
+    return 0;
+}
+
+static PyObject *
+bounded_lru_cache_update_lock_held(lru_cache_object *self,
+                                   PyObject *result, PyObject *key, Py_hash_t hash)
+{
+    _Py_CRITICAL_SECTION_ASSERT_OBJECT_LOCKED(self);
+    lru_list_elem *link;
+    PyObject *testresult;
+    int res;
+
     if (!result) {
         Py_DECREF(key);
         return NULL;
     }
-    testresult = _PyDict_GetItem_KnownHash(self->cache, key, hash);
-    if (testresult != NULL) {
+    res = _PyDict_GetItemRef_KnownHash_LockHeld((PyDictObject *)self->cache, key, hash,
+                                                &testresult);
+    if (res > 0) {
         /* Getting here means that this same key was added to the cache
            during the PyObject_Call(). Since the link update is already
           done, we need only return the computed result. */
+        Py_DECREF(testresult);
         Py_DECREF(key);
         return result;
     }
-    if (PyErr_Occurred()) {
+    if (res < 0) {
         /* This is an unusual case since this same lookup
            did not previously trigger an error during lookup.
            Treat it the same as an error in user function
@@ -1386,8 +1400,8 @@ bounded_lru_cache_wrapper(lru_cache_object *self, PyObject *args, PyObject *kwds
            The cache dict holds one reference to the link.
            We created one other reference when the link was created.
            The linked list only has borrowed references. */
-        int res = _PyDict_Pop_KnownHash((PyDictObject*)self->cache, link->key,
-                                        link->hash, &popresult);
+        res = _PyDict_Pop_KnownHash((PyDictObject*)self->cache, link->key,
+                                    link->hash, &popresult);
         if (res < 0) {
             /* An error arose while trying to remove the oldest key (the one
                being evicted) from the cache. We restore the link to its
@@ -1445,6 +1459,37 @@ bounded_lru_cache_wrapper(lru_cache_object *self, PyObject *args, PyObject *kwds
     return result;
 }
 
+static PyObject *
+bounded_lru_cache_wrapper(lru_cache_object *self, PyObject *args, PyObject *kwds)
+{
+    PyObject *key, *result;
+    Py_hash_t hash;
+    int res;
+
+    Py_BEGIN_CRITICAL_SECTION(self);
+    res = bounded_lru_cache_get_lock_held(self, args, kwds, &result, &key, &hash);
+    Py_END_CRITICAL_SECTION();
+
+    if (res < 0) {
+        return NULL;
+    }
+    if (res > 0) {
+        return result;
+    }
+
+    result = PyObject_Call(self->func, args, kwds);
+
+    Py_BEGIN_CRITICAL_SECTION(self);
+    /* Note: key will be stolen in the below function, and
+       result may be stolen or sometimes re-returned as a passthrough.
+       Treat both as being stolen.
+     */
+    result = bounded_lru_cache_update_lock_held(self, result, key, hash);
+    Py_END_CRITICAL_SECTION();
+
+    return result;
+}
+
 static PyObject *
 lru_cache_new(PyTypeObject *type, PyObject *args, PyObject *kw)
 {
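
The new bounded_lru_cache_wrapper above is the core of the change: look up under the critical section, call the user function with the lock released, then re-enter the critical section to re-check and insert. The following is a Python-level sketch of that shape, using an ordinary threading.Lock and a dict in place of the critical section and the linked list; the class and its behavior are illustrative only, not the C implementation.

# Sketch (illustrative only) of the lookup / unlocked call / re-check pattern.
import threading

class BoundedCacheSketch:
    def __init__(self, func, maxsize):
        self.func = func
        self.maxsize = maxsize
        self.cache = {}               # dict insertion order stands in for the LRU list
        self.lock = threading.Lock()

    def __call__(self, *args):
        key = args
        with self.lock:               # phase 1: lookup, lock held
            if key in self.cache:
                self.cache[key] = self.cache.pop(key)   # move to MRU end
                return self.cache[key]
        result = self.func(*args)     # phase 2: user code, lock released
        with self.lock:               # phase 3: re-check and insert, lock held
            if key in self.cache:
                # Another thread filled this key while we computed; keep the
                # cached entry and just return our own result.
                return result
            if len(self.cache) >= self.maxsize:
                self.cache.pop(next(iter(self.cache)))  # evict the LRU entry
            self.cache[key] = result
            return result

The duplicate computation on a simultaneous miss is accepted; the re-check only keeps the cache consistent, it does not try to prevent the extra call.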
@@ -1577,9 +1622,7 @@ lru_cache_call(PyObject *op, PyObject *args, PyObject *kwds)
 {
     lru_cache_object *self = lru_cache_object_CAST(op);
     PyObject *result;
-    Py_BEGIN_CRITICAL_SECTION(self);
     result = self->wrapper(self, args, kwds);
-    Py_END_CRITICAL_SECTION();
     return result;
 }

@@ -1606,11 +1649,15 @@ _functools__lru_cache_wrapper_cache_info_impl(PyObject *self)
     lru_cache_object *_self = (lru_cache_object *) self;
     if (_self->maxsize == -1) {
         return PyObject_CallFunction(_self->cache_info_type, "nnOn",
-                                     _self->hits, _self->misses, Py_None,
+                                     FT_ATOMIC_LOAD_SSIZE_RELAXED(_self->hits),
+                                     FT_ATOMIC_LOAD_SSIZE_RELAXED(_self->misses),
+                                     Py_None,
                                      PyDict_GET_SIZE(_self->cache));
     }
     return PyObject_CallFunction(_self->cache_info_type, "nnnn",
-                                 _self->hits, _self->misses, _self->maxsize,
+                                 FT_ATOMIC_LOAD_SSIZE_RELAXED(_self->hits),
+                                 FT_ATOMIC_LOAD_SSIZE_RELAXED(_self->misses),
+                                 _self->maxsize,
                                  PyDict_GET_SIZE(_self->cache));
 }
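With hits and misses now updated by atomic adds, cache_info() reads them with relaxed loads: calling it while other threads use the cache is safe, but the numbers are a best-effort snapshot rather than a synchronized total. A small illustrative use (the function and thread counts are arbitrary):

# Illustrative only: reading statistics while other threads use the cache.
import functools
import threading

@functools.lru_cache(maxsize=32)
def square(n):
    return n * n

threads = [threading.Thread(target=square, args=(i % 8,)) for i in range(64)]
for t in threads:
    t.start()
for t in threads:
    t.join()
# Every call increments exactly one counter, so hits + misses == 64 here,
# typically CacheInfo(hits=56, misses=8, maxsize=32, currsize=8).
print(square.cache_info())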

@@ -1627,7 +1674,8 @@ _functools__lru_cache_wrapper_cache_clear_impl(PyObject *self)
 {
     lru_cache_object *_self = (lru_cache_object *) self;
     lru_list_elem *list = lru_cache_unlink_list(_self);
-    _self->hits = _self->misses = 0;
+    FT_ATOMIC_STORE_SSIZE_RELAXED(_self->hits, 0);
+    FT_ATOMIC_STORE_SSIZE_RELAXED(_self->misses, 0);
     PyDict_Clear(_self->cache);
     lru_cache_clear_list(list);
     Py_RETURN_NONE;
